diff --git a/Solutions/Squadra Technologies SecRmm/Data Connectors/SquadraTechnologiesSecRMM.json b/Solutions/Squadra Technologies SecRmm/Data Connectors/SquadraTechnologiesSecRMM.json index 00d2e16729a..fe589640955 100644 --- a/Solutions/Squadra Technologies SecRmm/Data Connectors/SquadraTechnologiesSecRMM.json +++ b/Solutions/Squadra Technologies SecRmm/Data Connectors/SquadraTechnologiesSecRMM.json @@ -101,7 +101,7 @@ "instructionSteps": [ { "title": "", - "description": "Follow the step-by-step instructions provided in the [Squadra Technologies configuration guide for Azure Sentinel](https://www.squadratechnologies.com/StaticContent/ProductDownload/secRMM/9.11.0.0/secRMMAzureSentinelAdministratorGuide.pdf)", + "description": "Follow the step-by-step instructions provided in the [Squadra Technologies configuration guide for Microsoft Sentinel](https://www.squadratechnologies.com/StaticContent/ProductDownload/secRMM/9.11.0.0/secRMMAzureSentinelAdministratorGuide.pdf)", "instructions": [ { "parameters": { diff --git a/Solutions/Squadra Technologies SecRmm/Package/3.0.0.zip b/Solutions/Squadra Technologies SecRmm/Package/3.0.0.zip index a5e6f1f0e50..d4b03eab460 100644 Binary files a/Solutions/Squadra Technologies SecRmm/Package/3.0.0.zip and b/Solutions/Squadra Technologies SecRmm/Package/3.0.0.zip differ diff --git a/Solutions/Squadra Technologies SecRmm/Package/mainTemplate.json b/Solutions/Squadra Technologies SecRmm/Package/mainTemplate.json index f203f164b40..77a792a7246 100644 --- a/Solutions/Squadra Technologies SecRmm/Package/mainTemplate.json +++ b/Solutions/Squadra Technologies SecRmm/Package/mainTemplate.json @@ -40,7 +40,7 @@ "variables": { "_solutionName": "Squadra Technologies SecRmm", "_solutionVersion": "3.0.0", - "solutionId": "squadratechnologies.squadra_technologies_secrmm_mss", + "solutionId": "squadratechnologies.secrmmsentinel", "_solutionId": "[variables('solutionId')]", "uiConfigId1": "SquadraTechnologiesSecRMM", "_uiConfigId1": 
"[variables('uiConfigId1')]", @@ -193,7 +193,7 @@ }, "instructionSteps": [ { - "description": "Follow the step-by-step instructions provided in the [Squadra Technologies configuration guide for Azure Sentinel](https://www.squadratechnologies.com/StaticContent/ProductDownload/secRMM/9.11.0.0/secRMMAzureSentinelAdministratorGuide.pdf)", + "description": "Follow the step-by-step instructions provided in the [Squadra Technologies configuration guide for Microsoft Sentinel](https://www.squadratechnologies.com/StaticContent/ProductDownload/secRMM/9.11.0.0/secRMMAzureSentinelAdministratorGuide.pdf)", "instructions": [ { "parameters": { @@ -410,7 +410,7 @@ }, "instructionSteps": [ { - "description": "Follow the step-by-step instructions provided in the [Squadra Technologies configuration guide for Azure Sentinel](https://www.squadratechnologies.com/StaticContent/ProductDownload/secRMM/9.11.0.0/secRMMAzureSentinelAdministratorGuide.pdf)", + "description": "Follow the step-by-step instructions provided in the [Squadra Technologies configuration guide for Microsoft Sentinel](https://www.squadratechnologies.com/StaticContent/ProductDownload/secRMM/9.11.0.0/secRMMAzureSentinelAdministratorGuide.pdf)", "instructions": [ { "parameters": { diff --git a/Solutions/Squadra Technologies SecRmm/SolutionMetadata.json b/Solutions/Squadra Technologies SecRmm/SolutionMetadata.json index f03fc8ef08a..2c283fe1440 100644 --- a/Solutions/Squadra Technologies SecRmm/SolutionMetadata.json +++ b/Solutions/Squadra Technologies SecRmm/SolutionMetadata.json @@ -1,6 +1,6 @@ { "publisherId": "squadratechnologies", - "offerId": "squadra_technologies_secrmm_mss", + "offerId": "squadratechnologies.secrmmsentinel", "firstPublishDate": "2022-05-09", "providers": ["Squadra Technologies"], "categories": { diff --git a/Tools/Solutions Analyzer/connector-docs/solutions/squadra-technologies-secrmm.md b/Tools/Solutions Analyzer/connector-docs/solutions/squadra-technologies-secrmm.md new file mode 100644 index 
00000000000..ff6a3698bd8 --- /dev/null +++ b/Tools/Solutions Analyzer/connector-docs/solutions/squadra-technologies-secrmm.md @@ -0,0 +1,55 @@ +# Squadra Technologies SecRmm + +## Solution Information + +| | | +|------------------------|-------| +| **Publisher** | Squadra Technologies | +| **Support Tier** | Partner | +| **Support Link** | [https://www.squadratechnologies.com/Contact.aspx](https://www.squadratechnologies.com/Contact.aspx) | +| **Categories** | domains | +| **First Published** | 2022-05-09 | +| **Solution Folder** | [https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Squadra%20Technologies%20SecRmm](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Squadra%20Technologies%20SecRmm) | + +## Data Connectors + +This solution provides **1 data connector(s)**. + +### [Squadra Technologies secRMM](../connectors/squadratechnologiessecrmm.md) + +**Publisher:** Squadra Technologies + +Use the Squadra Technologies secRMM Data Connector to push USB removable storage security event data into Microsoft Sentinel Log Analytics. + +**Permissions:** + +**Resource Provider Permissions:** +- **Workspace** (Workspace): read and write permissions are required. +- **Keys** (Workspace): read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key). + +**Setup Instructions:** + +> ⚠️ **Note**: These instructions were automatically generated from the connector's user interface definition file using AI and may not be fully accurate. Please verify all configuration steps in the Microsoft Sentinel portal. 
+ +Follow the step-by-step instructions provided in the [Squadra Technologies configuration guide for Microsoft Sentinel](https://www.squadratechnologies.com/StaticContent/ProductDownload/secRMM/9.11.0.0/secRMMAzureSentinelAdministratorGuide.pdf) +- **Workspace ID**: `WorkspaceId` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* +- **Primary Key**: `PrimaryKey` + > *Note: The value above is dynamically provided when these instructions are presented within Microsoft Sentinel.* + +| | | +|--------------------------|---| +| **Tables Ingested** | `secRMM_CL` | +| **Connector Definition Files** | [SquadraTechnologiesSecRMM.json](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Squadra%20Technologies%20SecRmm/Data%20Connectors/SquadraTechnologiesSecRMM.json) | + +[→ View full connector details](../connectors/squadratechnologiessecrmm.md) + +## Tables Reference + +This solution ingests data into **1 table(s)**: + +| Table | Used By Connectors | +|-------|-------------------| +| `secRMM_CL` | [Squadra Technologies secRMM](../connectors/squadratechnologiessecrmm.md) | + +[← Back to Solutions Index](../solutions-index.md) diff --git a/Tools/Solutions Analyzer/solutions_connectors_tables_mapping.csv b/Tools/Solutions Analyzer/solutions_connectors_tables_mapping.csv index d8178a7a63d..4f0375b7977 100644 --- a/Tools/Solutions Analyzer/solutions_connectors_tables_mapping.csv +++ b/Tools/Solutions Analyzer/solutions_connectors_tables_mapping.csv @@ -1,1528 +1,1096 @@ 
-"Table","solution_name","solution_folder","solution_publisher_id","solution_offer_id","solution_first_publish_date","solution_last_publish_date","solution_version","solution_support_name","solution_support_tier","solution_support_link","solution_author_name","solution_categories","connector_id","connector_publisher","connector_title","connector_description","connector_instruction_steps","connector_permissions","connector_id_generated","connector_files","is_unique" -"","1Password","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/1Password","1password1617200969773","azure-sentinel-solution-1password","2023-12-01","","","1Password","Partner","https://support.1password.com/","","domains","","","","","","","false","","false" -"OnePasswordEventLogs_CL","1Password","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/1Password","1password1617200969773","azure-sentinel-solution-1password","2023-12-01","","","1Password","Partner","https://support.1password.com/","","domains","1Password","1Password","1Password","The [1Password](https://www.1password.com) solution for Microsoft Sentinel enables you to ingest 1Password logs and events into Microsoft Sentinel. The connector provides visibility into 1Password Events and Alerts in Microsoft Sentinel to improve monitoring and investigation capabilities.

**Underlying Microsoft Technologies used:**

This solution takes a dependency on the following technologies, and some of these dependencies either may be in [Preview](https://azure.microsoft.com/support/legal/preview-supplemental-terms/) state or might result in additional ingestion or operational costs:

- [Azure Functions](https://azure.microsoft.com/services/functions/#overview)","[{""description"": "">**NOTE:** This connector uses Azure Functions to connect to 1Password to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**STEP 1 - Configuration steps for the 1Password API**\n\n [Follow these instructions](https://support.1password.com/events-reporting/#appendix-issue-or-revoke-bearer-tokens) provided by 1Password to obtain an API Token. **Note:** A 1Password account is required""}, {""description"": ""**STEP 2 - Deploy the functionApp using DeployToAzure button to create the table, dcr and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the 1Password connector, a custom table needs to be created.""}, {""description"": ""This method provides an automated deployment of the 1Password connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-OnePassword-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name**, **Workspace Name**, **API Key**, and **URI**.\n - The default **Time Interval** is set to pull the last five (5) minutes of data. 
If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion.\n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy."", ""title"": ""Option 1 - Azure Resource Manager (ARM) Template""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""1Password API Token"", ""description"": ""A 1Password API Token is required. [See the documentation to learn more about the 1Password API](https://developer.1password.com/docs/events-api/reference). 
**Note:** A 1Password account is required""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/1Password/Data%20Connectors/deployment/1Password_data_connector.json","true" -"OnePasswordEventLogs_CL","1Password","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/1Password","1password1617200969773","azure-sentinel-solution-1password","2023-12-01","","","1Password","Partner","https://support.1password.com/","","domains","1Password","1Password","1Password","The [1Password](https://www.1password.com) solution for Microsoft Sentinel enables you to ingest sign-in attempts, item usage, and audit events from your 1Password Business account using the [1Password Events Reporting API](https://developer.1password.com/docs/events-api). This allows you to monitor and investigate events in 1Password in Microsoft Sentinel along with the other applications and services your organization uses.

**Underlying Microsoft Technologies used:**

This solution depends on the following technologies, and some of which may be in [Preview](https://azure.microsoft.com/support/legal/preview-supplemental-terms/) state or may incur additional ingestion or operational costs:

- [Azure Functions](https://azure.microsoft.com/services/functions/#overview)","[{""description"": "">**NOTE:** This connector uses Azure Functions to connect to 1Password to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs from Azure. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**STEP 1 - Configuration steps for the 1Password Events Reporting API**\n\n [Follow these instructions](https://support.1password.com/events-reporting/#appendix-issue-or-revoke-bearer-tokens) provided by 1Password to obtain an Events Reporting API Token. **Note:** A 1Password Business account is required""}, {""description"": ""**STEP 2 - Deploy the functionApp using DeployToAzure button to create the table, dcr and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the 1Password connector, a custom table needs to be created.""}, {""description"": ""This method provides an automated deployment of the 1Password connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-OnePassword-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name**, **Workspace Name**, **1Password Events API Key**, and **URI**.\n - The default **Time Interval** is set to five (5) minutes. 
If you'd like to modify the interval, you can adjust the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion.\n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy."", ""title"": ""Option 1 - Azure Resource Manager (ARM) Template""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""1Password Events API Token"", ""description"": ""A 1Password Events API Token is required. [See the documentation to learn more about the 1Password API](https://developer.1password.com/docs/events-api/reference). 
\n\n**Note:** A 1Password Business account is required""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/1Password/Data%20Connectors/1Password_API_FunctionApp.json","true" -"OnePasswordEventLogs_CL","1Password","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/1Password","1password1617200969773","azure-sentinel-solution-1password","2023-12-01","","","1Password","Partner","https://support.1password.com/","","domains","1Password(Serverless)","Unknown (ARM variable)","1Password (Serverless)","The 1Password CCP connector allows the user to ingest 1Password Audit, Signin & ItemUsage events into Microsoft Sentinel.","[{""title"": ""STEP 1 - Create a 1Password API token:"", ""description"": ""Follow the [1Password documentation](https://support.1password.com/events-reporting/#appendix-issue-or-revoke-bearer-tokens) for guidance on this step.""}, {""title"": ""STEP 2 - Choose the correct base URL:"", ""description"": ""There are multiple 1Password servers which might host your events. The correct server depends on your license and region. Follow the [1Password documentation](https://developer.1password.com/docs/events-api/reference/#servers) to choose the correct server. 
Input the base URL as displayed by the documentation (including 'https://' and without a trailing '/').""}, {""title"": ""STEP 3 - Enter your 1Password Details:"", ""description"": ""Enter the 1Password base URL & API Token below:"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Base Url"", ""placeholder"": ""Enter your Base Url"", ""type"": ""text"", ""name"": ""BaseUrl""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""Enter your API Token"", ""type"": ""password"", ""name"": ""ApiToken""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""1Password API token"", ""description"": ""A 1Password API Token is required. 
See the [1Password documentation](https://support.1password.com/events-reporting/#appendix-issue-or-revoke-bearer-tokens) on how to create an API token.""}]}","true","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/1Password/Data%20Connectors/1Password_ccpv2/azuredeploy_1Password_poller_connector.json","true" -"OnePasswordEventLogs_CL","1Password","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/1Password","1password1617200969773","azure-sentinel-solution-1password","2023-12-01","","","1Password","Partner","https://support.1password.com/","","domains","1PasswordCCPDefinition","1Password","1Password (Serverless)","The 1Password CCP connector allows the user to ingest 1Password Audit, Signin & ItemUsage events into Microsoft Sentinel.","[{""title"": ""STEP 1 - Create a 1Password API token:"", ""description"": ""Follow the [1Password documentation](https://support.1password.com/events-reporting/#appendix-issue-or-revoke-bearer-tokens) for guidance on this step.""}, {""title"": ""STEP 2 - Choose the correct base URL:"", ""description"": ""There are multiple 1Password servers which might host your events. The correct server depends on your license and region. Follow the [1Password documentation](https://developer.1password.com/docs/events-api/reference/#servers) to choose the correct server. 
Input the base URL as displayed by the documentation (including 'https://' and without a trailing '/').""}, {""title"": ""STEP 3 - Enter your 1Password Details:"", ""description"": ""Enter the 1Password base URL & API Token below:"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Base Url"", ""placeholder"": ""Enter your Base Url"", ""type"": ""text"", ""name"": ""BaseUrl""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""Enter your API Token"", ""type"": ""password"", ""name"": ""ApiToken""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""1Password API token"", ""description"": ""A 1Password API Token is required. 
See the [1Password documentation](https://support.1password.com/events-reporting/#appendix-issue-or-revoke-bearer-tokens) on how to create an API token.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/1Password/Data%20Connectors/1Password_ccpv2/1Password_DataConnectorDefinition.json","true" -"","42Crunch API Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/42Crunch%20API%20Protection","42crunch1580391915541","42crunch_sentinel_solution","2022-09-21","","","42Crunch API Protection","Partner","https://42crunch.com/","","domains","","","","","","","false","","false" -"apifirewall_log_1_CL","42Crunch API Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/42Crunch%20API%20Protection","42crunch1580391915541","42crunch_sentinel_solution","2022-09-21","","","42Crunch API Protection","Partner","https://42crunch.com/","","domains","42CrunchAPIProtection","42Crunch","API Protection","Connects the 42Crunch API protection to Azure Log Analytics via the REST API interface","[{""title"": ""Step 1 : Read the detailed documentation"", ""description"": ""The installation process is documented in great detail in the GitHub repository [Microsoft Sentinel integration](https://github.com/42Crunch/azure-sentinel-integration). 
The user should consult this repository further to understand installation and debug of the integration.""}, {""title"": ""Step 2: Retrieve the workspace access credentials"", ""description"": ""The first installation step is to retrieve both your **Workspace ID** and **Primary Key** from the Microsoft Sentinel platform.\nCopy the values shown below and save them for configuration of the API log forwarder integration."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Step 3: Install the 42Crunch protection and log forwarder"", ""description"": ""The next step is to install the 42Crunch protection and log forwarder to protect your API. Both components are availabe as containers from the [42Crunch repository](https://hub.docker.com/u/42crunch). The exact installation will depend on your environment, consult the [42Crunch protection documentation](https://docs.42crunch.com/latest/content/concepts/api_firewall_deployment_architecture.htm) for full details. 
Two common installation scenarios are described below:\n"", ""innerSteps"": [{""title"": ""Installation via Docker Compose"", ""description"": ""The solution can be installed using a [Docker compose file](https://github.com/42Crunch/azure-sentinel-integration/blob/main/sample-deployment/docker-compose.yml).""}, {""title"": ""Installation via Helm charts"", ""description"": ""The solution can be installed using a [Helm chart](https://github.com/42Crunch/azure-sentinel-integration/tree/main/helm/sentinel).""}]}, {""title"": ""Step 4: Test the data ingestion"", ""description"": ""In order to test the data ingestion the user should deploy the sample *httpbin* application alongside the 42Crunch protection and log forwarder [described in detail here](https://github.com/42Crunch/azure-sentinel-integration/tree/main/sample-deployment)."", ""innerSteps"": [{""title"": ""4.1 Install the sample"", ""description"": ""The sample application can be installed locally using a [Docker compose file](https://github.com/42Crunch/azure-sentinel-integration/blob/main/sample-deployment/docker-compose.yml) which will install the httpbin API server, the 42Crunch API protection and the Microsoft Sentinel log forwarder. Set the environment variables as required using the values copied from step 2.""}, {""title"": ""4.2 Run the sample"", ""description"": ""Verfify the API protection is connected to the 42Crunch platform, and then exercise the API locally on the *localhost* at port 8080 using Postman, curl, or similar. You should see a mixture of passing and failing API calls. ""}, {""title"": ""4.3 Verify the data ingestion on Log Analytics"", ""description"": ""After approximately 20 minutes access the Log Analytics workspace on your Microsoft Sentinel installation, and locate the *Custom Logs* section verify that a *apifirewall_log_1_CL* table exists. 
Use the sample queries to examine the data.""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/42Crunch%20API%20Protection/Data%20Connectors/42CrunchAPIProtection.json","true" -"","AI Analyst Darktrace","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AI%20Analyst%20Darktrace","darktrace1655286944672","darktrace_mss","2022-05-02","","","Darktrace","Partner","https://www.darktrace.com/en/contact/","","domains","","","","","","","false","","false" -"CommonSecurityLog","AI Analyst Darktrace","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AI%20Analyst%20Darktrace","darktrace1655286944672","darktrace_mss","2022-05-02","","","Darktrace","Partner","https://www.darktrace.com/en/contact/","","domains","Darktrace","Darktrace","[Deprecated] AI Analyst Darktrace via Legacy Agent","The Darktrace connector lets users connect Darktrace Model Breaches in real-time with Microsoft Sentinel, allowing creation of custom Dashboards, Workbooks, Notebooks and Custom Alerts to improve investigation. Microsoft Sentinel's enhanced visibility into Darktrace logs enables monitoring and mitigation of security threats.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Configure Darktrace to forward Syslog messages in CEF format to your Azure workspace via the Syslog agent. \n\n 1) Within the Darktrace Threat Visualizer, navigate to the System Config page in the main menu under Admin. \n\n 2) From the left-hand menu, select Modules and choose Microsoft Sentinel from the available Workflow Integrations.\\n 3) A configuration window will open. 
Locate Microsoft Sentinel Syslog CEF and click New to reveal the configuration settings, unless already exposed. \n\n 4) In the Server configuration field, enter the location of the log forwarder and optionally modify the communication port. Ensure that the port selected is set to 514 and is allowed by any intermediary firewalls. \n\n 5) Configure any alert thresholds, time offsets or additional settings as required. \n\n 6) Review any additional configuration options you may wish to enable that alter the Syslog syntax.\n\n 7) Enable Send Alerts and save your changes.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AI%20Analyst%20Darktrace/Data%20Connectors/AIA-Darktrace.json","true" -"CommonSecurityLog","AI Analyst Darktrace","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AI%20Analyst%20Darktrace","darktrace1655286944672","darktrace_mss","2022-05-02","","","Darktrace","Partner","https://www.darktrace.com/en/contact/","","domains","DarktraceAma","Darktrace","[Deprecated] AI Analyst Darktrace via AMA","The Darktrace connector lets users connect Darktrace Model Breaches in real-time with Microsoft Sentinel, allowing creation of custom Dashboards, Workbooks, Notebooks and Custom Alerts to improve investigation. Microsoft Sentinel's enhanced visibility into Darktrace logs enables monitoring and mitigation of security threats.","[{""title"": """", ""description"": """", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. 
Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine""}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Configure Darktrace to forward Syslog messages in CEF format to your Azure workspace via the Syslog agent. \n\n 1) Within the Darktrace Threat Visualizer, navigate to the System Config page in the main menu under Admin. \n\n 2) From the left-hand menu, select Modules and choose Microsoft Sentinel from the available Workflow Integrations.\n\n 3) A configuration window will open. Locate Microsoft Sentinel Syslog CEF and click New to reveal the configuration settings, unless already exposed. \n\n 4) In the Server configuration field, enter the location of the log forwarder and optionally modify the communication port. Ensure that the port selected is set to 514 and is allowed by any intermediary firewalls. \n\n 5) Configure any alert thresholds, time offsets or additional settings as required. \n\n 6) Review any additional configuration options you may wish to enable that alter the Syslog syntax.\n\n 7) Enable Send Alerts and save your changes.""}, {""title"": ""Step C. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AI%20Analyst%20Darktrace/Data%20Connectors/template_AIA-DarktraceAMA.json","true" -"","AIShield AI Security Monitoring","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AIShield%20AI%20Security%20Monitoring","rbei","bgsw_aishield_sentinel","2022-01-11","2025-03-06","","AIShield","Partner","https://azuremarketplace.microsoft.com/marketplace/apps/rbei.bgsw_aishield_product/","","domains","","","","","","","false","","false" -"AIShield_CL","AIShield AI Security Monitoring","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AIShield%20AI%20Security%20Monitoring","rbei","bgsw_aishield_sentinel","2022-01-11","2025-03-06","","AIShield","Partner","https://azuremarketplace.microsoft.com/marketplace/apps/rbei.bgsw_aishield_product/","","domains","BoschAIShield","Bosch","AIShield","[AIShield](https://www.boschaishield.com/) connector allows users to connect with AIShield custom defense mechanism logs with Microsoft Sentinel, allowing the creation of dynamic Dashboards, Workbooks, Notebooks and tailored Alerts to improve investigation and thwart attacks on AI systems. 
It gives users more insight into their organization's AI assets security posturing and improves their AI systems security operation capabilities. AIShield.GuArdIan analyzes the LLM generated content to identify and mitigate harmful content, safeguarding against legal, policy, role based, and usage based violations","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**AIShield**](https://aka.ms/sentinel-boschaishield-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": ""\n>**IMPORTANT:** Before deploying the AIShield Connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Note"", ""description"": ""Users should have utilized AIShield SaaS offering to conduct vulnerability analysis and deployed custom defense mechanisms generated along with their AI asset. 
[**Click here**](https://azuremarketplace.microsoft.com/marketplace/apps/rbei.bgsw_aishield_product) to know more or get in touch.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AIShield%20AI%20Security%20Monitoring/Data%20Connectors/AIShieldConnector.json","true" -"","ALC-WebCTRL","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ALC-WebCTRL","azuresentinel","azure-sentinel-solution-automated-logic-webctrl","2021-11-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"Event","ALC-WebCTRL","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ALC-WebCTRL","azuresentinel","azure-sentinel-solution-automated-logic-webctrl","2021-11-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AutomatedLogicWebCTRL","AutomatedLogic","Automated Logic WebCTRL ","You can stream the audit logs from the WebCTRL SQL server hosted on Windows machines connected to your Microsoft Sentinel. This connection enables you to view dashboards, create custom alerts and improve investigation. This gives insights into your Industrial Control Systems that are monitored or controlled by the WebCTRL BAS application.","[{""title"": ""1. Install and onboard the Microsoft agent for Windows."", ""description"": ""Learn about [agent setup](https://docs.microsoft.com/services-hub/health/mma-setup) and [windows events onboarding](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-windows-events). \n\n You can skip this step if you have already installed the Microsoft agent for Windows""}, {""title"": ""2. Configure Windows task to read the audit data and write it to windows events"", ""description"": ""Install and configure the Windows Scheduled Task to read the audit logs in SQL and write them as Windows Events. 
These Windows Events will be collected by the agent and forward to Microsoft Sentinel.\n\n> Notice that the data from all machines will be stored in the selected workspace"", ""innerSteps"": [{""title"": """", ""description"": ""2.1 Copy the [setup files](https://aka.ms/sentinel-automatedlogicwebctrl-tasksetup) to a location on the server.""}, {""title"": """", ""description"": ""2.2 Update the [ALC-WebCTRL-AuditPull.ps1](https://aka.ms/sentinel-automatedlogicwebctrl-auditpull) (copied in above step) script parameters like the target database name and windows event id's. Refer comments in the script for more details.""}, {""title"": """", ""description"": ""2.3 Update the windows task settings in the [ALC-WebCTRL-AuditPullTaskConfig.xml](https://aka.ms/sentinel-automatedlogicwebctrl-auditpulltaskconfig) file that was copied in above step as per requirement. Refer comments in the file for more details.""}, {""title"": """", ""description"": ""2.4 Install windows tasks using the updated configs copied in the above steps"", ""instructions"": [{""parameters"": {""label"": ""Run the following command in powershell from the directory where the setup files are copied in step 2.1"", ""value"": ""schtasks.exe /create /XML \""ALC-WebCTRL-AuditPullTaskConfig.xml\"" /tn \""ALC-WebCTRL-AuditPull\""""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the Event schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, validate below steps for any run time issues:\n\n> 1. Make sure that the scheduled task is created and is in running state in the Windows Task Scheduler.\n\n>2. Check for task execution errors in the history tab in Windows Task Scheduler for the newly created task in step 2.4\n\n>3. 
Make sure that the SQL Audit table consists new records while the scheduled windows task runs.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ALC-WebCTRL/Data%20Connectors/Connector_WindowsEvents_WebCTRL.json","true" -"","ARGOSCloudSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ARGOSCloudSecurity","argoscloudsecurity1605618416175","argos-sentinel","2022-08-16","","","ARGOS Cloud Security","Partner","https://argos-security.io/contact-us","","domains","","","","","","","false","","false" -"ARGOS_CL","ARGOSCloudSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ARGOSCloudSecurity","argoscloudsecurity1605618416175","argos-sentinel","2022-08-16","","","ARGOS Cloud Security","Partner","https://argos-security.io/contact-us","","domains","ARGOSCloudSecurity","ARGOS Cloud Security","ARGOS Cloud Security","The ARGOS Cloud Security integration for Microsoft Sentinel allows you to have all your important cloud security events in one place. This enables you to easily create dashboards, alerts, and correlate events across multiple systems. Overall this will improve your organization's security posture and security incident response.","[{""title"": ""1. 
Subscribe to ARGOS"", ""description"": ""Ensure you already own an ARGOS Subscription. If not, browse to [ARGOS Cloud Security](https://argos-security.io) and sign up to ARGOS.\n\nAlternatively, you can also purchase ARGOS via the [Azure Marketplace](https://azuremarketplace.microsoft.com/en-au/marketplace/apps/argoscloudsecurity1605618416175.argoscloudsecurity?tab=Overview).""}, {""title"": ""2. Configure Sentinel integration from ARGOS"", ""description"": ""Configure ARGOS to forward any new detections to your Sentinel workspace by providing ARGOS with your Workspace ID and Primary Key.\n\nThere is **no need to deploy any custom infrastructure**.\n\nEnter the information into the [ARGOS Sentinel](https://app.argos-security.io/account/sentinel) configuration page.\n\nNew detections will automatically be forwarded.\n\n[Learn more about the integration](https://www.argos-security.io/resources#integrations)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ARGOSCloudSecurity/Data%20Connectors/Connector_ARGOS.json","true" -"","AWS CloudFront","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS%20CloudFront","azuresentinel","azure-sentinel-solution-aws-cloudfront","2025-03-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"AWSCloudFront_AccessLog_CL","AWS CloudFront","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS%20CloudFront","azuresentinel","azure-sentinel-solution-aws-cloudfront","2025-03-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsCloudfrontCcpDefinition","Microsoft","Amazon Web Services CloudFront (via Codeless Connector Framework) (Preview)","This data connector enables the integration of AWS CloudFront logs with Microsoft Sentinel to support advanced threat detection, investigation, and security monitoring. By utilizing Amazon S3 for log storage and Amazon SQS for message queuing, the connector reliably ingests CloudFront access logs into Microsoft Sentinel","[{""title"": ""Ingesting AWS CloudFront logs in Microsoft Sentinel"", ""description"": ""### List of Resources Required:\n\n* Open ID Connect (OIDC) web identity provider\n* IAM Role\n* Amazon S3 Bucket\n* Amazon SQS\n* AWS CloudFront configuration\n\n"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. 
AWS CloudFormation Deployment \n To configure access on AWS, two templates have been generated to set up the AWS environment to send logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018**Specify template**\u2019 option, then \u2018**Upload a template file**\u2019 by clicking on \u2018**Choose file**\u2019 and selecting the appropriate CloudFormation template file provided below. Click \u2018**Choose file**\u2019 and select the downloaded template. \n 3. Click '**Next**' and '**Create stack**'.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWSCloudFront resources deployment"", ""isMultiLine"": true, ""fillWith"": [""AWSCloudFront""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new collector"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS%20CloudFront/Data%20Connectors/AWSCloudFrontLog_CCF/AWSCloudFrontLog_ConnectorDefinition.json","true" -"","AWS Security Hub","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS%20Security%20Hub","azuresentinel","azure-sentinel-solution-awssecurityhub","2025-03-12","2025-03-12","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"AWSSecurityHubFindings","AWS Security 
Hub","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS%20Security%20Hub","azuresentinel","azure-sentinel-solution-awssecurityhub","2025-03-12","2025-03-12","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsSecurityHubFindingsCcpDefinition","Microsoft","AWS Security Hub Findings (via Codeless Connector Framework)","This connector enables the ingestion of AWS Security Hub Findings, which are collected in AWS S3 buckets, into Microsoft Sentinel. It helps streamline the process of monitoring and managing security alerts by integrating AWS Security Hub Findings with Microsoft Sentinel's advanced threat detection and response capabilities.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""### 1. AWS CloudFormation Deployment \n Use the provided CloudFormation templates to configure the AWS environment for sending logs from AWS Security Hub to your Log Analytics Workspace.\n""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Deploy CloudFormation Templates in AWS: \n1. Navigate to the [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create).\n2. Click **Create stack** and select **With new resources**.\n3. Choose **Upload a template file**, then click **Choose file** to upload the appropriate CloudFormation template provided.\n4. Follow the prompts and click **Next** to complete the stack creation.\n5. After the stacks are created, note down the **Role ARN** and **SQS Queue URL**.\n""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID Connect authentication provider deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS Security Hub resources deployment"", ""isMultiLine"": true, ""fillWith"": [""AwsSecurityHub""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""### 2. 
Connect new collectors \n To enable AWS Security Hub Connector for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new collector"", ""subtitle"": ""AWS Security Hub connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": false, ""write"": false, ""delete"": false, ""action"": true}}], ""customs"": [{""name"": ""Environment"", ""description"": ""You must have the following AWS resources defined and configured: AWS Security Hub, Amazon Data Firehose, Amazon EventBridge, S3 Bucket, Simple Queue Service (SQS), IAM roles and permissions policies.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS%20Security%20Hub/Data%20Connectors/AWSSecurityHubFindings_CCP/AWSSecurityHubFindings_DataConnectorDefinition.json","true" -"","AWS Systems Manager","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS%20Systems%20Manager","azuresentinel","azure-sentinel-solution-awssystemsmanager","","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"","AWS VPC Flow Logs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS%20VPC%20Flow%20Logs","azuresentinel","azure-sentinel-solution-awsvpcflowlogs","2025-07-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"AWSVPCFlow","AWS VPC Flow Logs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS%20VPC%20Flow%20Logs","azuresentinel","azure-sentinel-solution-awsvpcflowlogs","2025-07-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AWSS3VPCFlowLogsParquetDefinition","Microsoft","Amazon Web Services S3 VPC Flow Logs","This connector allows you to ingest AWS VPC Flow Logs, collected in AWS S3 buckets, to Microsoft Sentinel. 
AWS VPC Flow Logs provide visibility into network traffic within your AWS Virtual Private Cloud (VPC), enabling security analysis and network monitoring.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. AWS CloudFormation Deployment \n To configure access on AWS, two templates have been generated to set up the AWS environment to send VPC Flow Logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create a Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018Specify template\u2019 option, then \u2018Upload a template file\u2019 by clicking on \u2018Choose file\u2019 and selecting the appropriate CloudFormation template file provided below. Click \u2018Choose file\u2019 and select the downloaded template. \n 3. Click 'Next' and 'Create stack'.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS VPC Flow Logs resources deployment"", ""isMultiLine"": true, ""fillWith"": [""AwsVPCFlow""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the 'Add new collector' button, fill in the required information and click on 'Connect'""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.fileFormat"", ""columnName"": ""File Format""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new collector"", ""subtitle"": ""AWS VPC Flow Logs connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""fileFormat"", ""required"": true, ""placeholder"": ""Select a file format"", ""options"": [{""key"": ""Json"", ""text"": ""JSON Format""}, {""key"": ""Parquet"", ""text"": ""Parquet Format""}, {""key"": ""Csv"", ""text"": ""CSV Format""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS%20VPC%20Flow%20Logs/Data%20Connectors/AWSVPCFlowLogs_CCP/AWSVPCFlowLogs_DataConnectorDefinition.json","true" 
-"","AWSAthena","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWSAthena","azuresentinel","azure-sentinel-solution-awsathena","2022-11-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"","AWS_AccessLogs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS_AccessLogs","azuresentinel","azure-sentinel-solution-awsaccesslogs","2025-02-06","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"AWSS3ServerAccess","AWS_AccessLogs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS_AccessLogs","azuresentinel","azure-sentinel-solution-awsaccesslogs","2025-02-06","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsS3ServerAccessLogsDefinition","Microsoft","AWS S3 Server Access Logs (via Codeless Connector Framework)","This connector allows you to ingest AWS S3 Server Access Logs into Microsoft Sentinel. These logs contain detailed records for requests made to S3 buckets, including the type of request, resource accessed, requester information, and response details. These logs are useful for analyzing access patterns, debugging issues, and ensuring security compliance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""### 1. AWS CloudFormation Deployment \n To configure access on AWS, two templates has been generated to set up the AWS environment to send logs from an AWS S3 Server Access logs to your Log Analytics Workspace.\n""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Deploy CloudFormation Templates in AWS: \n1. Navigate to the [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create).\n2. Click **Create stack** and select **With new resources**.\n3. 
Choose **Upload a template file**, then click **Choose file** to upload the appropriate CloudFormation template provided.\n4. Follow the prompts and click **Next** to complete the stack creation.\n5. After the stacks are created, note down the **Role ARN** and **SQS Queue URL**.\n""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID Connect authentication provider deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS Server Access resources deployment"", ""isMultiLine"": true, ""fillWith"": [""AWSS3ServerAccess""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""### 2. Connect new collectors \n To enable AWS S3 Server Access Logs Connector for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new collector"", ""subtitle"": ""AWS Server Access Logs connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""providerDisplayName"": ""Workspace"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""scope"": ""Workspace"", 
""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": false, ""write"": false, ""delete"": false, ""action"": true}}], ""customs"": [{""name"": ""Environment"", ""description"": ""You must have the following AWS resources defined and configured: S3 Bucket, Simple Queue Service (SQS), IAM roles and permissions policies.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS_AccessLogs/Data%20Connectors/AwsS3ServerAccessLogsDefinition_CCP/AWSS3ServerAccessLogs_ConnectorDefinition.json","true" -"","AWS_IAM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS_IAM","azuresentinel","azure-sentinel-solution-amazonwebservicesiam","2022-09-28","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","AbnormalSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AbnormalSecurity","abnormalsecuritycorporation1593011233180","fe1b4806-215b-4610-bf95-965a7a65579c","2021-10-20","","","Abnormal Security","Partner","https://abnormalsecurity.com/contact","","domains","","","","","","","false","","false" -"ABNORMAL_CASES_CL","AbnormalSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AbnormalSecurity","abnormalsecuritycorporation1593011233180","fe1b4806-215b-4610-bf95-965a7a65579c","2021-10-20","","","Abnormal Security","Partner","https://abnormalsecurity.com/contact","","domains","AbnormalSecurity","AbnormalSecurity","AbnormalSecurity ","The Abnormal Security data connector 
provides the capability to ingest threat and case logs into Microsoft Sentinel using the [Abnormal Security Rest API.](https://app.swaggerhub.com/apis/abnormal-security/abx/)","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Abnormal Security's REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Abnormal Security API**\n\n [Follow these instructions](https://app.swaggerhub.com/apis/abnormal-security/abx) provided by Abnormal Security to configure the REST API integration. **Note:** An Abnormal Security account is required""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Abnormal Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Abnormal Security API Authorization Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""This method provides an automated deployment of the Abnormal Security connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-abnormalsecurity-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the **Microsoft Sentinel Workspace ID**, **Microsoft Sentinel Shared Key** and **Abnormal Security REST API Key**.\n - The default **Time Interval** is set to pull the last five (5) minutes of data. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion.\n 4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Abnormal Security data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-abnormalsecurity-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. 
**Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. AbnormalSecurityXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tSENTINEL_WORKSPACE_ID\n\t\tSENTINEL_SHARED_KEY\n\t\tABNORMAL_SECURITY_REST_API_TOKEN\n\t\tlogAnalyticsUri (optional)\n(add any other settings required by the Function App)\nSet the `uri` value to: `` \n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Azure Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us.` \n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Abnormal Security API Token"", ""description"": ""An Abnormal Security API Token is required. [See the documentation to learn more about Abnormal Security API](https://app.swaggerhub.com/apis/abnormal-security/abx/). 
**Note:** An Abnormal Security account is required""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AbnormalSecurity/Data%20Connectors/AbnormalSecurity_API_FunctionApp.json","true" -"ABNORMAL_THREAT_MESSAGES_CL","AbnormalSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AbnormalSecurity","abnormalsecuritycorporation1593011233180","fe1b4806-215b-4610-bf95-965a7a65579c","2021-10-20","","","Abnormal Security","Partner","https://abnormalsecurity.com/contact","","domains","AbnormalSecurity","AbnormalSecurity","AbnormalSecurity ","The Abnormal Security data connector provides the capability to ingest threat and case logs into Microsoft Sentinel using the [Abnormal Security Rest API.](https://app.swaggerhub.com/apis/abnormal-security/abx/)","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Abnormal Security's REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Abnormal Security API**\n\n [Follow these instructions](https://app.swaggerhub.com/apis/abnormal-security/abx) provided by Abnormal Security to configure the REST API integration. 
**Note:** An Abnormal Security account is required""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Abnormal Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Abnormal Security API Authorization Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""This method provides an automated deployment of the Abnormal Security connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-abnormalsecurity-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Microsoft Sentinel Workspace ID**, **Microsoft Sentinel Shared Key** and **Abnormal Security REST API Key**.\n - The default **Time Interval** is set to pull the last five (5) minutes of data. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion.\n 4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Abnormal Security data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. 
Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-abnormalsecurity-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. AbnormalSecurityXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tSENTINEL_WORKSPACE_ID\n\t\tSENTINEL_SHARED_KEY\n\t\tABNORMAL_SECURITY_REST_API_TOKEN\n\t\tlogAnalyticsUri (optional)\n(add any other settings required by the Function App)\nSet the `uri` value to: `` \n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Azure Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us.` \n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Abnormal Security API Token"", ""description"": ""An Abnormal Security API Token is required. [See the documentation to learn more about Abnormal Security API](https://app.swaggerhub.com/apis/abnormal-security/abx/). **Note:** An Abnormal Security account is required""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AbnormalSecurity/Data%20Connectors/AbnormalSecurity_API_FunctionApp.json","true" -"","AbuseIPDB","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AbuseIPDB","azuresentinel","azure-sentinel-solution-abuseipdb","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"","Acronis Cyber Protect Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Acronis%20Cyber%20Protect%20Cloud","acronisinternationalgmbh","azure-sentinel-solution-acronis-cyber-protect","2025-10-28","2025-10-28","","Acronis International GmbH","Partner","https://www.acronis.com/en/support","","domains,verticals","","","","","","","false","","false" -"","Agari","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Agari","agari","Agari_MSS","2022-05-02","","","Agari","Partner","https://support.agari.com/hc/en-us/articles/360000645632-How-to-access-Agari-Support","","domains","","","","","","","false","","false" -"agari_apdpolicy_log_CL","Agari","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Agari","agari","Agari_MSS","2022-05-02","","","Agari","Partner","https://support.agari.com/hc/en-us/articles/360000645632-How-to-access-Agari-Support","","domains","Agari","Agari","Agari Phishing Defense and Brand Protection","This connector uses a Agari REST API connection to push data into Azure Sentinel Log Analytics.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to 
connect to the Agari APIs to pull its logs into Azure Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""STEP 1 - Get your Agari API credentials"", ""description"": ""\n1. Log into any Agari product (Client ID and Secret are the same for all applications) \n2. Click on your username in the upper right and select **Settings**\n3. Click on the **Generate API Secret** link to generate an API client_id and client_secret (the link will read **Regenerate API Secret** if you have already generated an API client ID/secret previously)\n4. Copy both the client_id and client_secret that are generated""}, {""title"": ""STEP 2 - (Optional) Enable the Security Graph API"", ""description"": ""Follow the instructions found on article [Connect Azure Sentinel to your threat intelligence platform](https://docs.microsoft.com/azure/sentinel/connect-threat-intelligence#connect-azure-sentinel-to-your-threat-intelligence-platform). 
Once the application is created you will need to record the Tenant ID, Client ID and Client Secret.""}, {""title"": ""STEP 3 - Deploy the connector and the associated Azure Function"", ""description"": ""\n>**IMPORTANT:** Before deploying the Agari Connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Agari API credentials from the previous step."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Choose a deployment option"", ""description"": """"}, {""title"": ""Option 1: Deploy using the Azure Resource Manager (ARM) Template"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-agari-azuredeploy) \n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **Agari Client ID**, **Agari Client Secret**, select `True` or `False` for the products you subscribe to, and if you wish to share IoCs with Sentinel, select `True` For **Enable Security Graph Sharing**, and enter the required IDs from the Azure Application.\n> - The Function App will request data from the Agari APIs every 5 minutes, corresponding to the Function App Timer.\n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n6. 
**NOTE:** Due to the use of Environment Variables to store log access times, the App requires 1 additional manual step. In the Function App, select the Function App Name and select Click on **Identity** and for System assigned Identity, click on **Azure role assignments** and **Add Role assignment**. Select **Subscription** as the scope, select your subscription and set the Role to **Contributor**. Click on **Save**.""}, {""title"": ""Option 2: Manual Deployment of Azure Functions"", ""description"": ""**1. Create a Function App**\n\n1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp), and select **+ Add**.\n2. In the **Basics** tab, ensure Runtime stack is set to **Powershell Core**. \n3. In the **Hosting** tab, ensure the **Consumption (Serverless)** plan type is selected.\n4. Make other preferable configuration changes, if needed, then click **Create**.""}, {""title"": """", ""description"": ""**2. Import Function App Code**\n\n1. In the newly created Function App, select **Functions** on the left pane and click **+ Add**.\n2. Click on **Code + Test** on the left pane. \n3. Copy the [Function App Code](https://aka.ms/sentinel-agari-functionapp) and paste into the Function App `run.ps1` editor.\n3. Click **Save**.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following eight to twelve (8-12) application settings individually, with their respective string values (case-sensitive): \n\t\tclientID\n\t\tclientSecret\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\tenableBrandProtectionAPI\n\t\tenablePhishingResponseAPI\n\t\tenablePhishingDefenseAPI\n\t\tresGroup\n\t\tfunctionName\n\t\tsubId\n\t\tenableSecurityGraphSharing\n\t\t<--- Required if enableSecurityGraphSharing is set to true --->\n\t\tGraphTenantId\n\t\tGraphClientId\n\t\tGraphClientSecret\n\t\tlogAnalyticsUri (optional)\n> - Enter your Agari ClientID and Secret in 'clientId' and 'clientSecret'\n> - Enter 'true' or 'false' for 'enablePhishingDefense', 'enableBrandProtection', 'enablePhishingResponse' as per your product subscriptions.\n> - Enter your Resource Group name in resGroup, the name of the Function (from previous step) in functionName and your Subscription ID in subId.\n> - Enter 'true' or 'false' for 'enableSecurtyGraphAPI'. If you are enabling the Security Graph, the 'GraphTenantId','GraphClientId', and 'GraphClientSecret' is required.\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n""}, {""title"": """", ""description"": ""**4. Set Permissions for the App**\n\n1. In the Function App, select the Function App Name and select Click on **Identity** and for System assigned Identity, set the status to On. \n\n2. Next, click on **Azure role assignments** and **Add Role assignment**. Select **Subscription** as the scope, select your subscription and set the Role to **Contributor**. Click on **Save**.""}, {""title"": """", ""description"": ""**5. Complete Setup.**\n\n1. Once all application settings have been entered, click **Save**. 
Note that it will take some time to have the required dependencies download, so you may see some initial failure messages.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Agari Phishing Defense, Phishing Response or Brand Protection API Client ID and Secret"", ""description"": ""Ensure you have your Client ID and Secret keys. Instructions can be found on the [Agari Developers Site](https://developers.agari.com/agari-platform/docs/quick-start).""}, {""name"": ""(Optional) Microsoft Security Graph API"", ""description"": ""The Agari Function App has the ability to share threat intelligence with Sentinel via the Security Graph API. To use this feature, you will need to enable the [Sentinel Threat Intelligence Platforms connector](https://docs.microsoft.com/azure/sentinel/connect-threat-intelligence) as well as register an application in Azure Active Directory. 
""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Agari/Data%20Connectors/Agari_API_FunctionApp.json","true" -"agari_apdtc_log_CL","Agari","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Agari","agari","Agari_MSS","2022-05-02","","","Agari","Partner","https://support.agari.com/hc/en-us/articles/360000645632-How-to-access-Agari-Support","","domains","Agari","Agari","Agari Phishing Defense and Brand Protection","This connector uses a Agari REST API connection to push data into Azure Sentinel Log Analytics.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Agari APIs to pull its logs into Azure Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""STEP 1 - Get your Agari API credentials"", ""description"": ""\n1. Log into any Agari product (Client ID and Secret are the same for all applications) \n2. Click on your username in the upper right and select **Settings**\n3. Click on the **Generate API Secret** link to generate an API client_id and client_secret (the link will read **Regenerate API Secret** if you have already generated an API client ID/secret previously)\n4. 
Copy both the client_id and client_secret that are generated""}, {""title"": ""STEP 2 - (Optional) Enable the Security Graph API"", ""description"": ""Follow the instructions found on article [Connect Azure Sentinel to your threat intelligence platform](https://docs.microsoft.com/azure/sentinel/connect-threat-intelligence#connect-azure-sentinel-to-your-threat-intelligence-platform). Once the application is created you will need to record the Tenant ID, Client ID and Client Secret.""}, {""title"": ""STEP 3 - Deploy the connector and the associated Azure Function"", ""description"": ""\n>**IMPORTANT:** Before deploying the Agari Connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Agari API credentials from the previous step."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Choose a deployment option"", ""description"": """"}, {""title"": ""Option 1: Deploy using the Azure Resource Manager (ARM) Template"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-agari-azuredeploy) \n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the **Workspace ID**, **Workspace Key**, **Agari Client ID**, **Agari Client Secret**, select `True` or `False` for the products you subscribe to, and if you wish to share IoCs with Sentinel, select `True` For **Enable Security Graph Sharing**, and enter the required IDs from the Azure Application.\n> - The Function App will request data from the Agari APIs every 5 minutes, corresponding to the Function App Timer.\n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n6. **NOTE:** Due to the use of Environment Variables to store log access times, the App requires 1 additional manual step. In the Function App, select the Function App Name and select Click on **Identity** and for System assigned Identity, click on **Azure role assignments** and **Add Role assignment**. Select **Subscription** as the scope, select your subscription and set the Role to **Contributor**. Click on **Save**.""}, {""title"": ""Option 2: Manual Deployment of Azure Functions"", ""description"": ""**1. Create a Function App**\n\n1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp), and select **+ Add**.\n2. In the **Basics** tab, ensure Runtime stack is set to **Powershell Core**. \n3. In the **Hosting** tab, ensure the **Consumption (Serverless)** plan type is selected.\n4. Make other preferable configuration changes, if needed, then click **Create**.""}, {""title"": """", ""description"": ""**2. Import Function App Code**\n\n1. 
In the newly created Function App, select **Functions** on the left pane and click **+ Add**.\n2. Click on **Code + Test** on the left pane. \n3. Copy the [Function App Code](https://aka.ms/sentinel-agari-functionapp) and paste into the Function App `run.ps1` editor.\n3. Click **Save**.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following eight to twelve (8-12) application settings individually, with their respective string values (case-sensitive): \n\t\tclientID\n\t\tclientSecret\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\tenableBrandProtectionAPI\n\t\tenablePhishingResponseAPI\n\t\tenablePhishingDefenseAPI\n\t\tresGroup\n\t\tfunctionName\n\t\tsubId\n\t\tenableSecurityGraphSharing\n\t\t<--- Required if enableSecurityGraphSharing is set to true --->\n\t\tGraphTenantId\n\t\tGraphClientId\n\t\tGraphClientSecret\n\t\tlogAnalyticsUri (optional)\n> - Enter your Agari ClientID and Secret in 'clientId' and 'clientSecret'\n> - Enter 'true' or 'false' for 'enablePhishingDefense', 'enableBrandProtection', 'enablePhishingResponse' as per your product subscriptions.\n> - Enter your Resource Group name in resGroup, the name of the Function (from previous step) in functionName and your Subscription ID in subId.\n> - Enter 'true' or 'false' for 'enableSecurtyGraphAPI'. If you are enabling the Security Graph, the 'GraphTenantId','GraphClientId', and 'GraphClientSecret' is required.\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n""}, {""title"": """", ""description"": ""**4. Set Permissions for the App**\n\n1. 
In the Function App, select the Function App Name and select Click on **Identity** and for System assigned Identity, set the status to On. \n\n2. Next, click on **Azure role assignments** and **Add Role assignment**. Select **Subscription** as the scope, select your subscription and set the Role to **Contributor**. Click on **Save**.""}, {""title"": """", ""description"": ""**5. Complete Setup.**\n\n1. Once all application settings have been entered, click **Save**. Note that it will take some time to have the required dependencies download, so you may see some inital failure messages.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Agari Phishing Defense, Phishing Response or Brand Protection API Client ID and Secret"", ""description"": ""Ensure you have your Client ID and Secret keys. 
Instructions can be found on the [Agari Developers Site](https://developers.agari.com/agari-platform/docs/quick-start).""}, {""name"": ""(Optional) Microsoft Security Graph API"", ""description"": ""The Agari Function App has the ability to share threat intelleigence with Sentinel via the Security Graph API. To use this feature, you will need to enable the [Sentinel Threat Intelligence Platforms connector](https://docs.microsoft.com/azure/sentinel/connect-threat-intelligence) as well as register an application in Azure Active Directory. ""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Agari/Data%20Connectors/Agari_API_FunctionApp.json","true" -"agari_bpalerts_log_CL","Agari","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Agari","agari","Agari_MSS","2022-05-02","","","Agari","Partner","https://support.agari.com/hc/en-us/articles/360000645632-How-to-access-Agari-Support","","domains","Agari","Agari","Agari Phishing Defense and Brand Protection","This connector uses a Agari REST API connection to push data into Azure Sentinel Log Analytics.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Agari APIs to pull its logs into Azure Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""STEP 1 - Get your Agari API credentials"", ""description"": ""\n1. Log into any Agari product (Client ID and Secret are the same for all applications) \n2. 
Click on your username in the upper right and select **Settings**\n3. Click on the **Generate API Secret** link to generate an API client_id and client_secret (the link will read **Regenerate API Secret** if you have already generated an API client ID/secret previously)\n4. Copy both the client_id and client_secret that are generated""}, {""title"": ""STEP 2 - (Optional) Enable the Security Graph API"", ""description"": ""Follow the instrcutions found on article [Connect Azure Sentinel to your threat intelligence platform](https://docs.microsoft.com/azure/sentinel/connect-threat-intelligence#connect-azure-sentinel-to-your-threat-intelligence-platform). Once the application is created you will need to record the Tenant ID, Client ID and Client Secret.""}, {""title"": ""STEP 3 - Deploy the connector and the associated Azure Function"", ""description"": ""\n>**IMPORTANT:** Before deploying the Agari Connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Agari API credentials from the previous step."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Choose a deployement option"", ""description"": """"}, {""title"": ""Option 1: Deploy using the Azure Resource Manager (ARM) Template"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-agari-azuredeploy) \n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the **Workspace ID**, **Workspace Key**, **Agari Client ID**, **Agari Client Secret**, select `True` or `False` for the products you subscribe to, and if you wish to share IoCs with Sentinel, select `True` For **Enable Security Graph Sharing**, and enter the required IDs from the Azure Application.\n> - The Function App will request data from the Agari APIs every 5 minutes, corresponding to the Funciton App Timer.\n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n6. **NOTE:** Due to the use of Environment Variables to store log access times, the App requires 1 additonal manual step. In the Function App, select the Function App Name and select Click on **Identity** and for System assigned Identity, click on **Azure role assignments** and **Add Role assignment**. Select **Subscription** as the scope, select your subscription and set the Role to **Contributor**. Click on **Save**.""}, {""title"": ""Option 2: Manual Deployment of Azure Functions"", ""description"": ""**1. Create a Function App**\n\n1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp), and select **+ Add**.\n2. In the **Basics** tab, ensure Runtime stack is set to **Powershell Core**. \n3. In the **Hosting** tab, ensure the **Consumption (Serverless)** plan type is selected.\n4. Make other preferrable configuration changes, if needed, then click **Create**.""}, {""title"": """", ""description"": ""**2. Import Function App Code**\n\n1. 
In the newly created Function App, select **Functions** on the left pane and click **+ Add**.\n2. Click on **Code + Test** on the left pane. \n3. Copy the [Function App Code](https://aka.ms/sentinel-agari-functionapp) and paste into the Function App `run.ps1` editor.\n3. Click **Save**.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following eight to twelve (8-12) application settings individually, with their respective string values (case-sensitive): \n\t\tclientID\n\t\tclientSecret\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\tenableBrandProtectionAPI\n\t\tenablePhishingResponseAPI\n\t\tenablePhishingDefenseAPI\n\t\tresGroup\n\t\tfunctionName\n\t\tsubId\n\t\tenableSecurityGraphSharing\n\t\t<--- Required if enableSecurityGraphSharing is set to true --->\n\t\tGraphTenantId\n\t\tGraphClientId\n\t\tGraphClientSecret\n\t\tlogAnalyticsUri (optional)\n> - Enter your Agari ClientID and Secret in 'clientId' and 'clientSecret'\n> - Enter 'true' or 'false' for 'enablePhishingDefense', 'enableBrandProtection', 'enablePhishingResponse' as per your product subscriptions.\n> - Enter your Resource Group name in resGroup, the name of the Function (from previous step) in functionName and your Subscription ID in subId.\n> - Enter 'true' or 'false' for 'enableSecurtyGraphAPI'. If you are enabling the Security Graph, the 'GraphTenantId','GraphClientId', and 'GraphClientSecret' is required.\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n""}, {""title"": """", ""description"": ""**4. Set Permissions for the App**\n\n1. 
In the Function App, select the Function App Name and select Click on **Identity** and for System assigned Identity, set the status to On. \n\n2. Next, click on **Azure role assignments** and **Add Role assignment**. Select **Subscription** as the scope, select your subscription and set the Role to **Contributor**. Click on **Save**.""}, {""title"": """", ""description"": ""**5. Complete Setup.**\n\n1. Once all application settings have been entered, click **Save**. Note that it will take some time to have the required dependencies download, so you may see some inital failure messages.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Agari Phishing Defense, Phishing Response or Brand Protection API Client ID and Secret"", ""description"": ""Ensure you have your Client ID and Secret keys. 
Instructions can be found on the [Agari Developers Site](https://developers.agari.com/agari-platform/docs/quick-start).""}, {""name"": ""(Optional) Microsoft Security Graph API"", ""description"": ""The Agari Function App has the ability to share threat intelleigence with Sentinel via the Security Graph API. To use this feature, you will need to enable the [Sentinel Threat Intelligence Platforms connector](https://docs.microsoft.com/azure/sentinel/connect-threat-intelligence) as well as register an application in Azure Active Directory. ""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Agari/Data%20Connectors/Agari_API_FunctionApp.json","true" -"","AgileSec Analytics Connector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AgileSec%20Analytics%20Connector","infosecglobal1632846037582","agilesec-analytics-connector","","","","InfoSecGlobal","Partner","https://www.infosecglobal.com/","","domains","","","","","","","false","","false" -"InfoSecAnalytics_CL","AgileSec Analytics Connector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AgileSec%20Analytics%20Connector","infosecglobal1632846037582","agilesec-analytics-connector","","","","InfoSecGlobal","Partner","https://www.infosecglobal.com/","","domains","InfoSecDataConnector","InfoSecGlobal","InfoSecGlobal Data Connector","Use this data connector to integrate with InfoSec Crypto Analytics and get data sent directly to Microsoft Sentinel.","[{""title"": ""InfoSecGlobal Crypto Analytics Data Connector"", ""description"": ""1. Data is sent to Microsoft Sentinel through Logstash\n 2. Required Logstash configuration is included with Crypto Analytics installation\n 3. 
Documentation provided with the Crypto Analytics installation explains how to enable sending data to Microsoft Sentinel\n"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AgileSec%20Analytics%20Connector/Data%20Connectors/Connector_Analytics_InfoSec.json","true" -"","Akamai Security Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Akamai%20Security%20Events","azuresentinel","azure-sentinel-solution-akamai","2022-03-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"CommonSecurityLog","Akamai Security Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Akamai%20Security%20Events","azuresentinel","azure-sentinel-solution-akamai","2022-03-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AkamaiSecurityEvents","Akamai","[Deprecated] Akamai Security Events via Legacy Agent","Akamai Solution for Microsoft Sentinel provides 
the capability to ingest [Akamai Security Events](https://www.akamai.com/us/en/products/security/) into Microsoft Sentinel. Refer to [Akamai SIEM Integration documentation](https://developer.akamai.com/tools/integrations/siem) for more information.","[{""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Akamai Security Events and load the function code or click [here](https://aka.ms/sentinel-akamaisecurityevents-parser), on the second line of the query, enter the hostname(s) of your Akamai Security Events device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. 
You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""[Follow these steps](https://developer.akamai.com/tools/integrations/siem) to configure Akamai CEF connector to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Akamai%20Security%20Events/Data%20Connectors/Connector_CEF_Akamai.json","true" -"CommonSecurityLog","Akamai Security Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Akamai%20Security%20Events","azuresentinel","azure-sentinel-solution-akamai","2022-03-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AkamaiSecurityEventsAma","Akamai","[Deprecated] Akamai Security Events via AMA","Akamai Solution for Microsoft Sentinel provides the capability to ingest [Akamai Security Events](https://www.akamai.com/us/en/products/security/) into Microsoft Sentinel. Refer to [Akamai SIEM Integration documentation](https://developer.akamai.com/tools/integrations/siem) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Akamai Security Events and load the function code or click [here](https://aka.ms/sentinel-akamaisecurityevents-parser), on the second line of the query, enter the hostname(s) of your Akamai Security Events device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""[Follow these steps](https://developer.akamai.com/tools/integrations/siem) to configure Akamai CEF connector to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address."", ""instructions"": []}, {""title"": ""Step C. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Akamai%20Security%20Events/Data%20Connectors/template_AkamaiSecurityEventsAMA.json","true" -"","Alibaba Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Alibaba%20Cloud","azuresentinel","azure-sentinel-solution-alibabacloud","2022-06-27","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"AliCloud_CL","Alibaba Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Alibaba%20Cloud","azuresentinel","azure-sentinel-solution-alibabacloud","2022-06-27","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AliCloud","AliCloud","AliCloud","The [AliCloud](https://www.alibabacloud.com/product/log-service) data connector provides the capability to retrieve logs from cloud applications using the Cloud API and store events into Microsoft Sentinel through the [REST API](https://aliyun-log-python-sdk.readthedocs.io/api.html). The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Azure Blob Storage API to pull logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**AliCloud**](https://aka.ms/sentinel-AliCloud-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the AliCloud API**\n\n Follow the instructions to obtain the credentials.\n\n1. Obtain the **AliCloudAccessKeyId** and **AliCloudAccessKey**: log in the account, click on AccessKey Management then click View Secret.\n2. Save credentials for using in the data connector.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the AliCloud data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""**Option 1 - Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the AliCloud data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-AliCloudAPI-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **WorkspaceID**, **WorkspaceKey**, **AliCloudAccessKeyId**, **AliCloudAccessKey**, **AliCloudProjects** and **AppInsightsWorkspaceResourceID** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": """", ""description"": ""**Option 2 - Manual Deployment of Azure Functions**\n\nUse the following step-by-step instructions to deploy the AliCloud data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-AliCloudAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. 
**Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. AliCloudXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tAliCloudAccessKeyId\n\t\tAliCloudAccessKey\n\t\tAliCloudProjects\n\t\tAppInsightsWorkspaceResourceID\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**AliCloudAccessKeyId** and **AliCloudAccessKey** are required for making API calls.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Alibaba%20Cloud/Data%20Connectors/AliCloud_API_FunctionApp.json","true" -"","Alibaba Cloud ActionTrail","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Alibaba%20Cloud%20ActionTrail","azuresentinel","azure-sentinel-solution-alibabacloud-actiontrail","2025-07-03","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"AliCloudActionTrailLogs_CL","Alibaba Cloud ActionTrail","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Alibaba%20Cloud%20ActionTrail","azuresentinel","azure-sentinel-solution-alibabacloud-actiontrail","2025-07-03","","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com","","domains","AliCloudActionTrailCCPDefinition","Microsoft","Alibaba Cloud ActionTrail (via Codeless Connector Framework)","The [Alibaba Cloud ActionTrail](https://www.alibabacloud.com/product/actiontrail) data connector provides the capability to retrieve actiontrail events stored into [Alibaba Cloud Simple Log Service](https://www.alibabacloud.com/product/log-service) and store them into Microsoft Sentinel through the [SLS REST API](https://www.alibabacloud.com/help/sls/developer-reference/api-sls-2020-12-30-getlogs). The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": ""Configure access to AliCloud SLS API"", ""description"": ""Before using the API, you need to prepare your identity account and access key pair to effectively access the API.\n1. We recommend that you use a Resource Access Management (RAM) user to call API operations. For more information, see [create a RAM user and authorize the RAM user to access Simple Log Service](https://www.alibabacloud.com/help/sls/create-a-ram-user-and-authorize-the-ram-user-to-access-log-service).\n2. Obtain the access key pair for the RAM user. 
For details see [get Access Key pair](https://www.alibabacloud.com/help/ram/user-guide/create-an-accesskey-pair).\n\nNote the access key pair details for the next step.""}, {""title"": ""Add ActionTrail Logstore"", ""description"": ""To enable the Alibaba Cloud ActionTrail connector for Microsoft Sentinel, click upon add ActionTrail Logstore, fill the form with the Alibaba Cloud environment configuration and click Connect."", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""AliCloud SLS Logstore Endpoint URL"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Logstore"", ""title"": ""Add ActionTrail Logstore"", ""subtitle"": ""Add SLS Logstore linked to Alibaba Cloud ActionTrail"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Alibaba Cloud SLS Public Endpoint"", ""placeholder"": "".log.aliyuncs.com"", ""type"": ""string"", ""name"": ""endpoint""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Project"", ""placeholder"": """", ""type"": ""string"", ""name"": ""project""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Logstore"", ""placeholder"": """", ""type"": ""string"", ""name"": ""logstore""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Access Key ID"", ""placeholder"": ""Access Key ID"", ""type"": ""password"", ""name"": ""accessKeyId""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Access Key Secret"", ""placeholder"": ""Access Key Secret"", ""type"": ""password"", ""name"": ""accessKeySecret""}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", 
""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""SLS REST API Credentials/permissions"", ""description"": ""**AliCloudAccessKeyId** and **AliCloudAccessKeySecret** are required for making API calls. RAM policy statement with action of atleast `log:GetLogStoreLogs` over resource `acs:log:{#regionId}:{#accountId}:project/{#ProjectName}/logstore/{#LogstoreName}` is needed to grant a RAM user the permissions to call this operation.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Alibaba%20Cloud%20ActionTrail/Data%20Connectors/AliCloudCloudTrailConnector_CCP/AliCloudActionTrail_DataConnectorDefinition.json","true" -"","Alsid For AD","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Alsid%20For%20AD","alsid1603447574634","Alsid_For_AD_MSS","2022-05-06","","","Alsid","Partner","https://www.alsid.com/contact-us/","","domains","","","","","","","false","","false" -"AlsidForADLog_CL","Alsid For AD","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Alsid%20For%20AD","alsid1603447574634","Alsid_For_AD_MSS","2022-05-06","","","Alsid","Partner","https://www.alsid.com/contact-us/","","domains","AlsidForAD","Alsid","Alsid for Active Directory","Alsid for Active Directory connector allows to export Alsid Indicators of Exposures, trailflow and Indicators of Attacks logs to Azure Sentinel in real time.
It provides a data parser to manipulate the logs more easily. The different workbooks ease your Active Directory monitoring and provide different ways to visualize the data. The analytic templates allow to automate responses regarding different events, exposures, or attacks.","[{""title"": """", ""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-alsidforad-parser) to create the Kusto Functions alias, **afad_parser**"", ""instructions"": []}, {""title"": ""1. Configure the Syslog server"", ""description"": ""You will first need a **linux Syslog** server that Alsid for AD will send logs to. Typically you can run **rsyslog** on **Ubuntu**.\n You can then configure this server as you wish, but it is recommended to be able to output AFAD logs in a separate file.\nAlternatively you can use [this Quickstart template](https://azure.microsoft.com/resources/templates/alsid-syslog-proxy/) which will deploy the Syslog server and the Microsoft agent for you. If you do use this template, you can skip step 3.""}, {""title"": ""2. Configure Alsid to send logs to your Syslog server"", ""description"": ""On your **Alsid for AD** portal, go to *System*, *Configuration* and then *Syslog*.\nFrom there you can create a new Syslog alert toward your Syslog server.\n\nOnce this is done, check that the logs are correctly gathered on your server in a seperate file (to do this, you can use the *Test the configuration* button in the Syslog alert configuration in AFAD).\nIf you used the Quickstart template, the Syslog server will by default listen on port 514 in UDP and 1514 in TCP, without TLS.""}, {""title"": ""3. 
Install and onboard the Microsoft agent for Linux"", ""description"": ""You can skip this step if you used the Quickstart template in step 1"", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""4. Configure the logs to be collected by the agents"", ""description"": ""Configure the agent to collect the logs.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Custom Logs**.\n2. Select **Apply below configuration to my machines** and click **Add**.\n3. Upload a sample AFAD Syslog file from the **Linux** machine running the **Syslog** server and click **Next**, for your convenience, you can find such a file [here](https://github.com/Azure/azure-quickstart-templates/blob/master/alsid-syslog-proxy/logs/AlsidForAD.log).\n4. Set the record delimiter to **New Line** if not already the case and click **Next**.\n5. Select **Linux** and enter the file path to the **Syslog** file, click **+** then **Next**. If you used the Quickstart template in step 1, the default location of the file is `/var/log/AlsidForAD.log`.\n6. 
Set the **Name** to *AlsidForADLog_CL* then click **Done** (Azure automatically adds *_CL* at the end of the name, there must be only one, make sure the name is not *AlsidForADLog_CL_CL*).\n\nAll of these steps are showcased [here](https://www.youtube.com/watch?v=JwV1uZSyXM4&feature=youtu.be) as an example"", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": """", ""description"": ""> You should now be able to receive logs in the *AlsidForADLog_CL* table, logs data can be parsed using the **afad_parser()** function, used by all query samples, workbooks and analytic templates.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required.
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Alsid%20For%20AD/Data%20Connectors/AlsidForAD.json","true" -"","Amazon Web Services","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services","azuresentinel","azure-sentinel-solution-amazonwebservices","2022-05-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"AWSCloudTrail","Amazon Web Services","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services","azuresentinel","azure-sentinel-solution-amazonwebservices","2022-05-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AWS","Amazon","Amazon Web Services","Follow these instructions to connect to AWS and stream your CloudTrail logs into Microsoft Sentinel. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2218883&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect AWS cloud trail with Microsoft Sentinel\u200b"", ""description"": ""The connection necessitates giving Microsoft permissions to access your AWS account. 
To enable this, follow the instructions under [Connect AWS to Microsoft Sentinel](https://aka.ms/AWSConnector) and use these parameters when prompted:\n\n> Data from all regions will be sent to and stored in the workspace's region.\n\n> It takes about 5 minutes until the connection streams data to your workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""MicrosoftAwsAccount""], ""label"": ""Microsoft account ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""External ID (Workspace ID)""}, ""type"": ""CopyableLabel""}, {""parameters"": {""text"": ""The integration is applicable for AWS public cloud accounts."", ""visible"": false, ""inline"": true}, ""type"": ""InfoMessage""}, {""parameters"": {}, ""type"": ""AwsCloudTrail""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services/Data%20Connectors/template_AWS.json","true" -"AWSCloudTrail","Amazon Web Services","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services","azuresentinel","azure-sentinel-solution-amazonwebservices","2022-05-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsS3","Amazon","Amazon Web Services S3","This connector allows you to ingest AWS service logs, collected in AWS S3 buckets, to Microsoft Sentinel. The currently supported data types are:
* AWS CloudTrail
* VPC Flow Logs
* AWS GuardDuty
* AWSCloudWatch

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2218883&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""1. Set up your AWS environment"", ""description"": ""There are two options for setting up your AWS environment to send logs from an S3 bucket to your Log Analytics Workspace:"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Setup with PowerShell script (recommended)"", ""instructions"": [{""parameters"": {""govScript"": ""Download and extract the files from the following link: [AWS S3 Setup Script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/AWS-S3/ConfigAwsS3DataConnectorScriptsGov.zip).\n\n> 1. Make sure that you have PowerShell on your machine: [Installation instructions for PowerShell](https://docs.microsoft.com/powershell/scripting/install/installing-powershell?view=powershell-7.2).\n\n> 2. Make sure that you have the AWS CLI on your machine: [Installation instructions for the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2.html).\n\nBefore running the script, run the aws configure command from your PowerShell command line, and enter the relevant information as prompted. See [AWS Command Line Interface | Configuration basics](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html) for details. Note: When Aws configure is run, Default output format should not be set to None. It must be set to some value, such as json."", ""prodScript"": ""Download and extract the files from the following link: [AWS S3 Setup Script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/AWS-S3/ConfigAwsS3DataConnectorScripts.zip).\n\n> 1. Make sure that you have PowerShell on your machine: [Installation instructions for PowerShell](https://docs.microsoft.com/powershell/scripting/install/installing-powershell?view=powershell-7.2).\n\n> 2. 
Make sure that you have the AWS CLI on your machine: [Installation instructions for the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2.html).\n\nBefore running the script, run the aws configure command from your PowerShell command line, and enter the relevant information as prompted. See [AWS Command Line Interface | Configuration basics](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html) for details. Note: When Aws configure is run, Default output format should not be set to None. It must be set to some value, such as json.""}, ""type"": ""MarkdownControlEnvBased""}, {""parameters"": {""label"": ""Run script to set up the environment"", ""value"": ""./ConfigAwsConnector.ps1""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""External ID (Workspace ID)""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Manual Setup"", ""description"": ""Follow the instruction in the following link to set up the environment: [Connect AWS S3 to Microsoft Sentinel](https://aka.ms/AWSS3Connector)""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Add connection"", ""instructions"": [{""parameters"": {}, ""type"": ""AwsS3""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Environment"", ""description"": ""you must have the following AWS resources defined and configured: S3, Simple Queue Service (SQS), IAM roles and permissions policies, and the AWS services whose logs you want to collect.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services/Data%20Connectors/template_AwsS3.json","true" -"AWSCloudWatch","Amazon Web Services","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services","azuresentinel","azure-sentinel-solution-amazonwebservices","2022-05-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsS3","Amazon","Amazon Web Services S3","This connector allows you to ingest AWS service logs, collected in AWS S3 buckets, to Microsoft Sentinel. The currently supported data types are:
* AWS CloudTrail
* VPC Flow Logs
* AWS GuardDuty
* AWSCloudWatch

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2218883&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""1. Set up your AWS environment"", ""description"": ""There are two options for setting up your AWS environment to send logs from an S3 bucket to your Log Analytics Workspace:"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Setup with PowerShell script (recommended)"", ""instructions"": [{""parameters"": {""govScript"": ""Download and extract the files from the following link: [AWS S3 Setup Script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/AWS-S3/ConfigAwsS3DataConnectorScriptsGov.zip).\n\n> 1. Make sure that you have PowerShell on your machine: [Installation instructions for PowerShell](https://docs.microsoft.com/powershell/scripting/install/installing-powershell?view=powershell-7.2).\n\n> 2. Make sure that you have the AWS CLI on your machine: [Installation instructions for the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2.html).\n\nBefore running the script, run the aws configure command from your PowerShell command line, and enter the relevant information as prompted. See [AWS Command Line Interface | Configuration basics](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html) for details. Note: When Aws configure is run, Default output format should not be set to None. It must be set to some value, such as json."", ""prodScript"": ""Download and extract the files from the following link: [AWS S3 Setup Script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/AWS-S3/ConfigAwsS3DataConnectorScripts.zip).\n\n> 1. Make sure that you have PowerShell on your machine: [Installation instructions for PowerShell](https://docs.microsoft.com/powershell/scripting/install/installing-powershell?view=powershell-7.2).\n\n> 2. 
Make sure that you have the AWS CLI on your machine: [Installation instructions for the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2.html).\n\nBefore running the script, run the aws configure command from your PowerShell command line, and enter the relevant information as prompted. See [AWS Command Line Interface | Configuration basics](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html) for details. Note: When Aws configure is run, Default output format should not be set to None. It must be set to some value, such as json.""}, ""type"": ""MarkdownControlEnvBased""}, {""parameters"": {""label"": ""Run script to set up the environment"", ""value"": ""./ConfigAwsConnector.ps1""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""External ID (Workspace ID)""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Manual Setup"", ""description"": ""Follow the instruction in the following link to set up the environment: [Connect AWS S3 to Microsoft Sentinel](https://aka.ms/AWSS3Connector)""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Add connection"", ""instructions"": [{""parameters"": {}, ""type"": ""AwsS3""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Environment"", ""description"": ""you must have the following AWS resources defined and configured: S3, Simple Queue Service (SQS), IAM roles and permissions policies, and the AWS services whose logs you want to collect.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services/Data%20Connectors/template_AwsS3.json","true" -"AWSGuardDuty","Amazon Web Services","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services","azuresentinel","azure-sentinel-solution-amazonwebservices","2022-05-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsS3","Amazon","Amazon Web Services S3","This connector allows you to ingest AWS service logs, collected in AWS S3 buckets, to Microsoft Sentinel. The currently supported data types are:
* AWS CloudTrail
* VPC Flow Logs
* AWS GuardDuty
* AWSCloudWatch

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2218883&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""1. Set up your AWS environment"", ""description"": ""There are two options for setting up your AWS environment to send logs from an S3 bucket to your Log Analytics Workspace:"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Setup with PowerShell script (recommended)"", ""instructions"": [{""parameters"": {""govScript"": ""Download and extract the files from the following link: [AWS S3 Setup Script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/AWS-S3/ConfigAwsS3DataConnectorScriptsGov.zip).\n\n> 1. Make sure that you have PowerShell on your machine: [Installation instructions for PowerShell](https://docs.microsoft.com/powershell/scripting/install/installing-powershell?view=powershell-7.2).\n\n> 2. Make sure that you have the AWS CLI on your machine: [Installation instructions for the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2.html).\n\nBefore running the script, run the aws configure command from your PowerShell command line, and enter the relevant information as prompted. See [AWS Command Line Interface | Configuration basics](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html) for details. Note: When Aws configure is run, Default output format should not be set to None. It must be set to some value, such as json."", ""prodScript"": ""Download and extract the files from the following link: [AWS S3 Setup Script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/AWS-S3/ConfigAwsS3DataConnectorScripts.zip).\n\n> 1. Make sure that you have PowerShell on your machine: [Installation instructions for PowerShell](https://docs.microsoft.com/powershell/scripting/install/installing-powershell?view=powershell-7.2).\n\n> 2. 
Make sure that you have the AWS CLI on your machine: [Installation instructions for the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2.html).\n\nBefore running the script, run the aws configure command from your PowerShell command line, and enter the relevant information as prompted. See [AWS Command Line Interface | Configuration basics](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html) for details. Note: When Aws configure is run, Default output format should not be set to None. It must be set to some value, such as json.""}, ""type"": ""MarkdownControlEnvBased""}, {""parameters"": {""label"": ""Run script to set up the environment"", ""value"": ""./ConfigAwsConnector.ps1""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""External ID (Workspace ID)""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Manual Setup"", ""description"": ""Follow the instruction in the following link to set up the environment: [Connect AWS S3 to Microsoft Sentinel](https://aka.ms/AWSS3Connector)""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Add connection"", ""instructions"": [{""parameters"": {}, ""type"": ""AwsS3""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Environment"", ""description"": ""you must have the following AWS resources defined and configured: S3, Simple Queue Service (SQS), IAM roles and permissions policies, and the AWS services whose logs you want to collect.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services/Data%20Connectors/template_AwsS3.json","true" -"AWSVPCFlow","Amazon Web Services","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services","azuresentinel","azure-sentinel-solution-amazonwebservices","2022-05-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsS3","Amazon","Amazon Web Services S3","This connector allows you to ingest AWS service logs, collected in AWS S3 buckets, to Microsoft Sentinel. The currently supported data types are:
* AWS CloudTrail
* VPC Flow Logs
* AWS GuardDuty
* AWSCloudWatch

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2218883&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""1. Set up your AWS environment"", ""description"": ""There are two options for setting up your AWS environment to send logs from an S3 bucket to your Log Analytics Workspace:"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Setup with PowerShell script (recommended)"", ""instructions"": [{""parameters"": {""govScript"": ""Download and extract the files from the following link: [AWS S3 Setup Script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/AWS-S3/ConfigAwsS3DataConnectorScriptsGov.zip).\n\n> 1. Make sure that you have PowerShell on your machine: [Installation instructions for PowerShell](https://docs.microsoft.com/powershell/scripting/install/installing-powershell?view=powershell-7.2).\n\n> 2. Make sure that you have the AWS CLI on your machine: [Installation instructions for the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2.html).\n\nBefore running the script, run the aws configure command from your PowerShell command line, and enter the relevant information as prompted. See [AWS Command Line Interface | Configuration basics](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html) for details. Note: When Aws configure is run, Default output format should not be set to None. It must be set to some value, such as json."", ""prodScript"": ""Download and extract the files from the following link: [AWS S3 Setup Script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/AWS-S3/ConfigAwsS3DataConnectorScripts.zip).\n\n> 1. Make sure that you have PowerShell on your machine: [Installation instructions for PowerShell](https://docs.microsoft.com/powershell/scripting/install/installing-powershell?view=powershell-7.2).\n\n> 2. 
Make sure that you have the AWS CLI on your machine: [Installation instructions for the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2.html).\n\nBefore running the script, run the aws configure command from your PowerShell command line, and enter the relevant information as prompted. See [AWS Command Line Interface | Configuration basics](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html) for details. Note: When Aws configure is run, Default output format should not be set to None. It must be set to some value, such as json.""}, ""type"": ""MarkdownControlEnvBased""}, {""parameters"": {""label"": ""Run script to set up the environment"", ""value"": ""./ConfigAwsConnector.ps1""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""External ID (Workspace ID)""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Manual Setup"", ""description"": ""Follow the instruction in the following link to set up the environment: [Connect AWS S3 to Microsoft Sentinel](https://aka.ms/AWSS3Connector)""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Add connection"", ""instructions"": [{""parameters"": {}, ""type"": ""AwsS3""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Environment"", ""description"": ""you must have the following AWS resources defined and configured: S3, Simple Queue Service (SQS), IAM roles and permissions policies, and the AWS services whose logs you want to collect.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services/Data%20Connectors/template_AwsS3.json","true" -"","Amazon Web Services NetworkFirewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20NetworkFirewall","azuresentinel","azure-sentinel-solution-aws-networkfirewall","2025-03-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"AWSNetworkFirewallAlert","Amazon Web Services NetworkFirewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20NetworkFirewall","azuresentinel","azure-sentinel-solution-aws-networkfirewall","2025-03-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsNetworkFirewallCcpDefinition","Microsoft","Amazon Web Services NetworkFirewall (via Codeless Connector Framework)","This data connector allows you to ingest AWS Network Firewall logs into Microsoft Sentinel for advanced threat detection and security monitoring. 
By leveraging Amazon S3 and Amazon SQS, the connector forwards network traffic logs, intrusion detection alerts, and firewall events to Microsoft Sentinel, enabling real-time analysis and correlation with other security data","[{""title"": ""Ingesting AWS NetworkFirewall logs in Microsoft Sentinel"", ""description"": ""### List of Resources Required:\n\n* Open ID Connect (OIDC) web identity provider\n* IAM Role\n* Amazon S3 Bucket\n* Amazon SQS\n* AWSNetworkFirewall configuration\n* Follow this instructions for [AWS NetworkFirewall Data connector](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20NetworkFirewall/Data%20Connectors/readme.md) configuration \n\n"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. AWS CloudFormation Deployment \n To configure access on AWS, two templates has been generated to set up the AWS environment to send logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018**Specify template**\u2019 option, then \u2018**Upload a template file**\u2019 by clicking on \u2018**Choose file**\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018**Choose file**\u2019 and select the downloaded template. \n 3. Click '**Next**' and '**Create stack**'.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWSNetworkFirewall resources deployment"", ""isMultiLine"": true, ""fillWith"": [""AWSNetworkFirewall""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-AWSNetworkFirewall-AlertLog"", ""text"": ""Alert Log""}, {""key"": ""Custom-AWSNetworkFirewall-FlowLog"", ""text"": ""Flow Log""}, {""key"": ""Custom-AWSNetworkFirewall-TlsLog"", ""text"": ""Tls Log""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": 
false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20NetworkFirewall/Data%20Connectors/AWSNetworkFirewallLogs_CCP/AWSNetworkFirewallLog_ConnectorDefinition.json","true" -"AWSNetworkFirewallFlow","Amazon Web Services NetworkFirewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20NetworkFirewall","azuresentinel","azure-sentinel-solution-aws-networkfirewall","2025-03-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsNetworkFirewallCcpDefinition","Microsoft","Amazon Web Services NetworkFirewall (via Codeless Connector Framework)","This data connector allows you to ingest AWS Network Firewall logs into Microsoft Sentinel for advanced threat detection and security monitoring. By leveraging Amazon S3 and Amazon SQS, the connector forwards network traffic logs, intrusion detection alerts, and firewall events to Microsoft Sentinel, enabling real-time analysis and correlation with other security data","[{""title"": ""Ingesting AWS NetworkFirewall logs in Microsoft Sentinel"", ""description"": ""### List of Resources Required:\n\n* Open ID Connect (OIDC) web identity provider\n* IAM Role\n* Amazon S3 Bucket\n* Amazon SQS\n* AWSNetworkFirewall configuration\n* Follow this instructions for [AWS NetworkFirewall Data connector](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20NetworkFirewall/Data%20Connectors/readme.md) configuration \n\n"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. AWS CloudFormation Deployment \n To configure access on AWS, two templates has been generated to set up the AWS environment to send logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. 
Choose the \u2018**Specify template**\u2019 option, then \u2018**Upload a template file**\u2019 by clicking on \u2018**Choose file**\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018**Choose file**\u2019 and select the downloaded template. \n 3. Click '**Next**' and '**Create stack**'.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWSNetworkFirewall resources deployment"", ""isMultiLine"": true, ""fillWith"": [""AWSNetworkFirewall""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, 
""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-AWSNetworkFirewall-AlertLog"", ""text"": ""Alert Log""}, {""key"": ""Custom-AWSNetworkFirewall-FlowLog"", ""text"": ""Flow Log""}, {""key"": ""Custom-AWSNetworkFirewall-TlsLog"", ""text"": ""Tls Log""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20NetworkFirewall/Data%20Connectors/AWSNetworkFirewallLogs_CCP/AWSNetworkFirewallLog_ConnectorDefinition.json","true" -"AWSNetworkFirewallTls","Amazon Web Services NetworkFirewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20NetworkFirewall","azuresentinel","azure-sentinel-solution-aws-networkfirewall","2025-03-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsNetworkFirewallCcpDefinition","Microsoft","Amazon Web Services NetworkFirewall (via Codeless Connector Framework)","This data connector allows you to ingest AWS Network Firewall logs into Microsoft Sentinel for advanced threat detection and security monitoring. 
By leveraging Amazon S3 and Amazon SQS, the connector forwards network traffic logs, intrusion detection alerts, and firewall events to Microsoft Sentinel, enabling real-time analysis and correlation with other security data","[{""title"": ""Ingesting AWS NetworkFirewall logs in Microsoft Sentinel"", ""description"": ""### List of Resources Required:\n\n* Open ID Connect (OIDC) web identity provider\n* IAM Role\n* Amazon S3 Bucket\n* Amazon SQS\n* AWSNetworkFirewall configuration\n* Follow this instructions for [AWS NetworkFirewall Data connector](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20NetworkFirewall/Data%20Connectors/readme.md) configuration \n\n"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. AWS CloudFormation Deployment \n To configure access on AWS, two templates has been generated to set up the AWS environment to send logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018**Specify template**\u2019 option, then \u2018**Upload a template file**\u2019 by clicking on \u2018**Choose file**\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018**Choose file**\u2019 and select the downloaded template. \n 3. Click '**Next**' and '**Create stack**'.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWSNetworkFirewall resources deployment"", ""isMultiLine"": true, ""fillWith"": [""AWSNetworkFirewall""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-AWSNetworkFirewall-AlertLog"", ""text"": ""Alert Log""}, {""key"": ""Custom-AWSNetworkFirewall-FlowLog"", ""text"": ""Flow Log""}, {""key"": ""Custom-AWSNetworkFirewall-TlsLog"", ""text"": ""Tls Log""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": 
false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20NetworkFirewall/Data%20Connectors/AWSNetworkFirewallLogs_CCP/AWSNetworkFirewallLog_ConnectorDefinition.json","true" -"","Amazon Web Services Route 53","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20Route%2053","azuresentinel","azure-sentinel-solution-amazonwebservicesroute53","2025-03-21","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"AWSRoute53Resolver","Amazon Web Services Route 53","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20Route%2053","azuresentinel","azure-sentinel-solution-amazonwebservicesroute53","2025-03-21","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AWSRoute53ResolverCCPDefinition","Microsoft","Amazon Web Services S3 DNS Route53 (via Codeless Connector Framework)","This connector enables ingestion of AWS Route 53 DNS logs into Microsoft Sentinel for enhanced visibility and threat detection. It supports DNS Resolver query logs ingested directly from AWS S3 buckets, while Public DNS query logs and Route 53 audit logs can be ingested using Microsoft Sentinel's AWS CloudWatch and CloudTrail connectors. Comprehensive instructions are provided to guide you through the setup of each log type. Leverage this connector to monitor DNS activity, detect potential threats, and improve your security posture in cloud environments.","[{""title"": ""AWS Route53"", ""description"": ""This connector enables the ingestion of AWS Route 53 DNS logs into Microsoft Sentinel, providing enhanced visibility into DNS activity and strengthening threat detection capabilities. 
It supports direct ingestion of DNS Resolver query logs from AWS S3 buckets, while Public DNS query logs and Route 53 audit logs can be ingested via Microsoft Sentinel\u2019s AWS CloudWatch and CloudTrail connectors. Detailed setup instructions are provided for each log type. Use this connector to monitor DNS traffic, identify potential threats, and enhance your cloud security posture.\n\nYou can ingest the following type of logs from AWS Route 53 to Microsoft Sentinel:\n1. Route 53 Resolver query logs\n2. Route 53 Public Hosted zones query logs (via Microsoft Sentinel CloudWatch connector)\n3. Route 53 audit logs (via Microsoft Sentinel CloudTrail connector)""}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Ingesting Route53 Resolver query logs in Microsoft Sentinel"", ""description"": ""### List of Resources Required:\n\n* Open ID Connect (OIDC) web identity provider\n* IAM Role\n* Amazon S3 Bucket\n* Amazon SQS\n* Route 53 Resolver query logging configuration\n* VPC to associate with Route53 Resolver query log config\n\n"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. AWS CloudFormation Deployment \n To configure access on AWS, two templates has been generated to set up the AWS environment to send logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018**Specify template**\u2019 option, then \u2018**Upload a template file**\u2019 by clicking on \u2018**Choose file**\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018**Choose file**\u2019 and select the downloaded template. \n 3. 
Click '**Next**' and '**Create stack**'.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS Route53 resources deployment"", ""isMultiLine"": true, ""fillWith"": [""AWSRoute53Resolver""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""### 2. Connect new collectors \n To enable Amazon Web Services S3 DNS Route53 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS Security Hub connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""securestring"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""securestring"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}]}]}}]}, {""title"": ""Ingesting Route 53 Public Hosted zones query logs (via Microsoft Sentinel CloudWatch connector)"", ""description"": ""Public Hosted zone query logs are exported to CloudWatch service in AWS. We can use 'Amazon Web Services S3' connector to ingest CloudWatch logs from AWS to Microsoft Sentinel."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1: Configure logging for Public DNS queries"", ""description"": ""1. 
Sign in to the AWS Management Console and open the Route 53 console at [AWS Route 53](https://console.aws.amazon.com/route53/).\n2. Navigate to Route 53 > Hosted zones.\n3. Choose the Public hosted zone that you want to configure query logging for.\n4. In the Hosted zone details pane, click \""Configure query logging\"".\n5. Choose an existing log group or create a new log group.\n6. Choose Create.""}, {""title"": ""Step 2: Configure Amazon Web Services S3 data connector for AWS CloudWatch"", ""description"": ""AWS CloudWatch logs can be exported to an S3 bucket using lambda function. To ingest Public DNS queries from `AWS CloudWatch` to `S3` bucket and then to Microsoft Sentinel, follow the instructions provided in the [Amazon Web Services S3 connector](https://learn.microsoft.com/en-us/azure/sentinel/connect-aws?tabs=s3).""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Ingesting Route 53 audit logs (via Microsoft Sentinel CloudTrail connector)"", ""description"": ""Route 53 audit logs i.e. the logs related to actions taken by user, role or AWS service in Route 53 can be exported to an S3 bucket via AWS CloudTrail service. We can use 'Amazon Web Services S3' connector to ingest CloudTrail logs from AWS to Microsoft Sentinel."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1: Configure logging for AWS Route 53 Audit logs"", ""description"": ""1. Sign in to the AWS Management Console and open the CloudTrail console at [AWS CloudTrail](https://console.aws.amazon.com/cloudtrail)\n2. If you do not have an existing trail, click on 'Create trail'\n3. Enter a name for your trail in the Trail name field.\n4. Select Create new S3 bucket (you may also choose to use an existing S3 bucket).\n5. Leave the other settings as default, and click Next.\n6. Select Event type, make sure Management events is selected.\n7. Select API activity, 'Read' and 'Write'\n8. Click Next.\n9. 
Review the settings and click 'Create trail'.""}, {""title"": ""Step 2: Configure Amazon Web Services S3 data connector for AWS CloudTrail"", ""description"": ""To ingest audit and management logs from `AWS CloudTrail` to Microsoft Sentinel, follow the instructions provided in the [Amazon Web Services S3 connector](https://learn.microsoft.com/en-us/azure/sentinel/connect-aws?tabs=s3)""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": false, ""write"": false, ""delete"": false, ""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20Route%2053/Data%20Connectors/AWSRoute53Resolver_CCP/AWSRoute53Resolver_DataConnectorDefinition.json","true" -"","Anvilogic","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Anvilogic","anvilogic1725900018831","azure-sentinel-solution-anvilogic","2025-06-20","","","Anvilogic","Partner","https://www.anvilogic.com/","","domains","","","","","","","false","","false" 
-"Anvilogic_Alerts_CL","Anvilogic","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Anvilogic","anvilogic1725900018831","azure-sentinel-solution-anvilogic","2025-06-20","","","Anvilogic","Partner","https://www.anvilogic.com/","","domains","AnvilogicCCFDefinition","Anvilogic","Anvilogic","The Anvilogic data connector allows you to pull events of interest generated in the Anvilogic ADX cluster into your Microsoft Sentinel","[{""description"": ""Complete the form to ingest Anvilogic Alerts into your Microsoft Sentinel"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Token Endpoint"", ""placeholder"": ""https://login[.]microsoftonline[.]com//oauth2/v2.0/token"", ""type"": ""text"", ""name"": ""tokenEndpoint""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Anvilogic ADX Scope"", ""placeholder"": ""/.default"", ""type"": ""text"", ""name"": ""authorizationEndpoint""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Anvilogic ADX Request URI"", ""placeholder"": ""/v2/rest/query"", ""type"": ""text"", ""name"": ""apiEndpoint""}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}], ""title"": ""Connect to Anvilogic to start collecting events of interest in Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Anvilogic Application Registration Client ID and Client Secret"", ""description"": ""To access the Anvilogic ADX we require the client id and client secret from the Anvilogic app 
registration""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Anvilogic/Data%20Connectors/AnviLogic_CCF/Anvilogic_DataConnectorDefinition.json","true" -"","Apache Log4j Vulnerability Detection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Apache%20Log4j%20Vulnerability%20Detection","azuresentinel","azure-sentinel-solution-apachelog4jvulnerability","2021-12-15","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"","ApacheHTTPServer","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ApacheHTTPServer","azuresentinel","azure-sentinel-solution-apachehttpserver","2021-10-27","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"ApacheHTTPServer_CL","ApacheHTTPServer","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ApacheHTTPServer","azuresentinel","azure-sentinel-solution-apachehttpserver","2021-10-27","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ApacheHTTPServer","Apache","[Deprecated] Apache HTTP Server","The Apache HTTP Server data connector provides the capability to ingest [Apache HTTP Server](http://httpd.apache.org/) events into Microsoft Sentinel. Refer to [Apache Logs documentation](https://httpd.apache.org/docs/2.4/logs.html) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias ApacheHTTPServer and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ApacheHTTPServer/Parsers/ApacheHTTPServer.txt). 
The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Apache HTTP Server where the logs are generated.\n\n> Logs from Apache HTTP Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Configure the logs to be collected"", ""description"": ""Configure the custom log directory to be collected"", ""instructions"": [{""parameters"": {""linkType"": ""OpenCustomLogsSettings""}, ""type"": ""InstallAgent""}]}, {""title"": """", ""description"": ""1. Select the link above to open your workspace advanced settings \n2. From the left pane, select **Data**, select **Custom Logs** and click **Add+**\n3. Click **Browse** to upload a sample of a Apache HTTP Server log file (e.g. access.log or error.log). Then, click **Next >**\n4. Select **New line** as the record delimiter and click **Next >**\n5. Select **Windows** or **Linux** and enter the path to Apache HTTP logs based on your configuration. Example: \n - **Windows** directory: `C:\\Server\\bin\\Apache24\\logs\\*.log`\n - **Linux** Directory: `/var/log/httpd/*.log` \n6. After entering the path, click the '+' symbol to apply, then click **Next >** \n7. Add **ApacheHTTPServer_CL** as the custom log Name and click **Done**""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ApacheHTTPServer/Data%20Connectors/Connector_ApacheHTTPServer_agent.json","true" -"","AristaAwakeSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AristaAwakeSecurity","arista-networks","awake-security","2021-10-18","","","Arista - Awake Security","Partner","https://awakesecurity.com/","","domains","","","","","","","false","","false" -"CommonSecurityLog","AristaAwakeSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AristaAwakeSecurity","arista-networks","awake-security","2021-10-18","","","Arista - Awake Security","Partner","https://awakesecurity.com/","","domains","AristaAwakeSecurity","Arista Networks","[Deprecated] Awake Security via Legacy Agent","The Awake Security CEF connector allows users to send detection model matches from the Awake Security Platform to Microsoft Sentinel. Remediate threats quickly with the power of network detection and response and speed up investigations with deep visibility especially into unmanaged entities including users, devices and applications on your network. The connector also enables the creation of network security-focused custom alerts, incidents, workbooks and notebooks that align with your existing security operations workflows. ","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. 
Forward Awake Adversarial Model match results to a CEF collector."", ""description"": ""Perform the following steps to forward Awake Adversarial Model match results to a CEF collector listening on TCP port **514** at IP **192.168.0.1**:\n- Navigate to the Detection Management Skills page in the Awake UI.\n- Click + Add New Skill.\n- Set the Expression field to,\n>integrations.cef.tcp { destination: \""192.168.0.1\"", port: 514, secure: false, severity: Warning }\n- Set the Title field to a descriptive name like,\n>Forward Awake Adversarial Model match result to Microsoft Sentinel.\n- Set the Reference Identifier to something easily discoverable like,\n>integrations.cef.sentinel-forwarder\n- Click Save.\n\nNote: Within a few minutes of saving the definition and other fields the system will begin sending new model match results to the CEF events collector as they are detected.\n\nFor more information, refer to the **Adding a Security Information and Event Management Push Integration** page from the Help Documentation in the Awake UI.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AristaAwakeSecurity/Data%20Connectors/Connector_AristaAwakeSecurity_CEF.json","true" -"","Armis","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis","armisinc1668090987837","armis-solution","2022-08-02","2024-08-23","","Armis Corporation","Partner","https://support.armis.com/","","domains","","","","","","","false","","false" -"Armis_Activities_CL","Armis","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis","armisinc1668090987837","armis-solution","2022-08-02","2024-08-23","","Armis Corporation","Partner","https://support.armis.com/","","domains","ArmisActivities","Armis","Armis Activities","The [Armis](https://www.armis.com/) Activities connector gives the capability to ingest Armis device Activities into Microsoft Sentinel through the Armis REST API. Refer to the API documentation: `https://.armis.com/api/v1/doc` for more information. The connector provides the ability to get device activity information from the Armis platform. 
Armis uses your existing infrastructure to discover and identify devices without having to deploy any agents. Armis detects what all devices are doing in your environment and classifies those activities to get a complete picture of device behavior. These activities are analyzed for an understanding of normal and abnormal device behavior and used to assess device and network risk.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Armis API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias ArmisActivities and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis/Parsers/ArmisActivities.yaml). The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Armis API**\n\n Follow these instructions to create an Armis API secret key.\n 1. Log into your Armis instance\n 2. Navigate to Settings -> API Management\n 3. If the secret key has not already been created, press the Create button to create the secret key\n 4. 
To access the secret key, press the Show button\n 5. The secret key can now be copied and used during the Armis Activities connector configuration""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Armis Activities data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Armis API Authorization Key(s)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Armis connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ArmisActivitiesAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-ArmisActivitiesAPI-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tArmis Secret Key \n\t\tArmis URL (https://.armis.com/api/v1/) \n\t\tArmis Activity Table Name \n\t\tArmis Schedule \n\t\tAvoid Duplicates (Default: false) \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Armis Activity data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. 
Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-ArmisActivitiesAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ARMISXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. 
In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tArmis Secret Key \n\t\tArmis URL (https://.armis.com/api/v1/) \n\t\tArmis Activity Table Name \n\t\tArmis Schedule \n\t\tAvoid Duplicates (Default: false) \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Armis Secret Key** is required. 
See the documentation to learn more about API on the `https://.armis.com/api/v1/doc`""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis/Data%20Connectors/ArmisActivities/ArmisActivities_API_FunctionApp.json","true" -"Armis_Alerts_CL","Armis","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis","armisinc1668090987837","armis-solution","2022-08-02","2024-08-23","","Armis Corporation","Partner","https://support.armis.com/","","domains","ArmisAlerts","Armis","Armis Alerts","The [Armis](https://www.armis.com/) Alerts connector gives the capability to ingest Armis Alerts into Microsoft Sentinel through the Armis REST API. Refer to the API documentation: `https://.armis.com/api/v1/docs` for more information. The connector provides the ability to get alert information from the Armis platform and to identify and prioritize threats in your environment. Armis uses your existing infrastructure to discover and identify devices without having to deploy any agents. ","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Armis API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. 
[Follow these steps](https://aka.ms/sentinel-ArmisAlertsAPI-parser) to create the Kusto functions alias, **ArmisAlerts**""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Armis API**\n\n Follow these instructions to create an Armis API secret key.\n 1. Log into your Armis instance\n 2. Navigate to Settings -> API Management\n 3. If the secret key has not already been created, press the Create button to create the secret key\n 4. To access the secret key, press the Show button\n 5. The secret key can now be copied and used during the Armis Alerts connector configuration""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Armis Alert data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Armis API Authorization Key(s)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Armis connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ArmisAlertsAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-ArmisAlertsAPI-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tArmis Secret Key \n\t\tArmis URL (https://.armis.com/api/v1/) \n\t\tArmis Alert Table Name \n\t\tArmis Schedule \n\t\tAvoid Duplicates (Default: true) \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Armis Alert data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-ArmisAlertsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ARMISXXXXX).\n\n\te. 
**Select a runtime:** Choose Python 3.11\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tArmis Secret Key \n\t\tArmis URL (https://.armis.com/api/v1/) \n\t\tArmis Alert Table Name \n\t\tArmis Schedule \n\t\tAvoid Duplicates (Default: true) \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Armis Secret Key** is required. See the documentation to learn more about API on the `https://.armis.com/api/v1/doc`""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis/Data%20Connectors/ArmisAlerts/ArmisAlerts_API_FunctionApp.json","true" -"Armis_Activities_CL","Armis","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis","armisinc1668090987837","armis-solution","2022-08-02","2024-08-23","","Armis Corporation","Partner","https://support.armis.com/","","domains","ArmisAlertsActivities","Armis","Armis Alerts Activities","The [Armis](https://www.armis.com/) Alerts Activities connector gives the capability to ingest Armis Alerts and Activities into Microsoft Sentinel through the Armis REST API. Refer to the API documentation: `https://.armis.com/api/v1/docs` for more information. The connector provides the ability to get alert and activity information from the Armis platform and to identify and prioritize threats in your environment. Armis uses your existing infrastructure to discover and identify devices without having to deploy any agents. ","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Armis API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias ArmisActivities/ArmisAlerts and load the function code. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Armis API**\n\n Follow these instructions to create an Armis API secret key.\n 1. Log into your Armis instance\n 2. Navigate to Settings -> API Management\n 3. If the secret key has not already been created, press the Create button to create the secret key\n 4. To access the secret key, press the Show button\n 5. The secret key can now be copied and used during the Armis Alerts Activities connector configuration""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. 
Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID are required as configuration parameters for the execution of Armis Alerts Activities Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Armis Alerts Activities Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Armis Alerts Activities Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. 
Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Create a Keyvault**\n\n Follow these instructions to create a new Keyvault.\n 1. In the Azure portal, Go to **Key vaults**. Click create.\n 2. Select Subscription, Resource Group and provide unique name of keyvault.\n\n> **NOTE:** Create a separate key vault for each **API key** within one workspace.""}, {""title"": """", ""description"": ""**STEP 7 - Create Access Policy in Keyvault**\n\n Follow these instructions to create access policy in Keyvault.\n 1. Go to keyvaults, select your keyvault, go to Access policies on left side panel. Click create.\n 2. Select all keys & secrets permissions. Click next.\n 3. In the principal section, search by application name which was generated in STEP - 2. 
Click next.\n\n> **NOTE:** Ensure the Permission model in the Access Configuration of Key Vault is set to **'Vault access policy'**""}, {""title"": """", ""description"": ""**STEP 8 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Armis Device data connector, have the Armis API Authorization Key(s) readily available..""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Armis connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ArmisAlertsActivitiesAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-ArmisAlertsActivitiesAPI-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace Name \n\t\tArmis Secret Key \n\t\tArmis URL (https://.armis.com/api/v1/) \n\t\tArmis Alert Table Name \n\t\tArmis Activity Table Name \n\t\tSeverity (Default: Low) \n\t\tArmis Schedule \n\t\tKeyVault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tAzure Entra ObjectID \n\t\tTenant Id \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Armis Alerts Activities data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-ArmisAlertsActivities320-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ARMISXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace Name \n\t\tArmis Secret Key \n\t\tArmis URL (https://.armis.com/api/v1/) \n\t\tArmis Alert Table Name \n\t\tArmis Activity Table Name \n\t\tSeverity (Default: Low) \n\t\tArmis Schedule \n\t\tKeyVault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tAzure Entra ObjectID \n\t\tTenant Id \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Armis Secret Key** is required. 
See the documentation to learn more about API on the `https://.armis.com/api/v1/doc`""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis/Data%20Connectors/ArmisAlertsActivities/ArmisAlertsActivities_API_FunctionApp.json","true" -"Armis_Alerts_CL","Armis","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis","armisinc1668090987837","armis-solution","2022-08-02","2024-08-23","","Armis Corporation","Partner","https://support.armis.com/","","domains","ArmisAlertsActivities","Armis","Armis Alerts Activities","The [Armis](https://www.armis.com/) Alerts Activities connector gives the capability to ingest Armis Alerts and Activities into Microsoft Sentinel through the Armis REST API. Refer to the API documentation: `https://.armis.com/api/v1/docs` for more information. The connector provides the ability to get alert and activity information from the Armis platform and to identify and prioritize threats in your environment. Armis uses your existing infrastructure to discover and identify devices without having to deploy any agents. ","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Armis API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias ArmisActivities/ArmisAlerts and load the function code. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Armis API**\n\n Follow these instructions to create an Armis API secret key.\n 1. Log into your Armis instance\n 2. Navigate to Settings -> API Management\n 3. If the secret key has not already been created, press the Create button to create the secret key\n 4. To access the secret key, press the Show button\n 5. The secret key can now be copied and used during the Armis Alerts Activities connector configuration""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Armis Alerts Activities Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Armis Alerts Activities Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Armis Alerts Activities Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Create a Keyvault**\n\n Follow these instructions to create a new Keyvault.\n 1. In the Azure portal, Go to **Key vaults**. Click create.\n 2. Select Subscription, Resource Group and provide unique name of keyvault.\n\n> **NOTE:** Create a separate key vault for each **API key** within one workspace.""}, {""title"": """", ""description"": ""**STEP 7 - Create Access Policy in Keyvault**\n\n Follow these instructions to create access policy in Keyvault.\n 1. Go to keyvaults, select your keyvault, go to Access policies on left side panel. Click create.\n 2. Select all keys & secrets permissions. Click next.\n 3. In the principal section, search by application name which was generated in STEP - 2. 
Click next.\n\n> **NOTE:** Ensure the Permission model in the Access Configuration of Key Vault is set to **'Vault access policy'**""}, {""title"": """", ""description"": ""**STEP 8 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Armis Device data connector, have the Armis API Authorization Key(s) readily available.""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Armis connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ArmisAlertsActivitiesAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-ArmisAlertsActivitiesAPI-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace Name \n\t\tArmis Secret Key \n\t\tArmis URL (https://.armis.com/api/v1/) \n\t\tArmis Alert Table Name \n\t\tArmis Activity Table Name \n\t\tSeverity (Default: Low) \n\t\tArmis Schedule \n\t\tKeyVault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tAzure Entra ObjectID \n\t\tTenant Id \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Armis Alerts Activities data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-ArmisAlertsActivities320-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ARMISXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace Name \n\t\tArmis Secret Key \n\t\tArmis URL (https://.armis.com/api/v1/) \n\t\tArmis Alert Table Name \n\t\tArmis Activity Table Name \n\t\tSeverity (Default: Low) \n\t\tArmis Schedule \n\t\tKeyVault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tAzure Entra ObjectID \n\t\tTenant Id \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Armis Secret Key** is required. 
See the documentation to learn more about API on the `https://.armis.com/api/v1/doc`""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis/Data%20Connectors/ArmisAlertsActivities/ArmisAlertsActivities_API_FunctionApp.json","true" -"Armis_Devices_CL","Armis","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis","armisinc1668090987837","armis-solution","2022-08-02","2024-08-23","","Armis Corporation","Partner","https://support.armis.com/","","domains","ArmisDevices","Armis","Armis Devices","The [Armis](https://www.armis.com/) Device connector gives the capability to ingest Armis Devices into Microsoft Sentinel through the Armis REST API. Refer to the API documentation: `https://.armis.com/api/v1/docs` for more information. The connector provides the ability to get device information from the Armis platform. Armis uses your existing infrastructure to discover and identify devices without having to deploy any agents. Armis can also integrate with your existing IT & security management tools to identify and classify each and every device, managed or unmanaged in your environment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Armis API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. 
[Follow these steps](https://aka.ms/sentinel-ArmisDevice-parser) to create the Kusto functions alias, **ArmisDevice**""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Armis API**\n\n Follow these instructions to create an Armis API secret key.\n 1. Log into your Armis instance\n 2. Navigate to Settings -> API Management\n 3. If the secret key has not already been created, press the Create button to create the secret key\n 4. To access the secret key, press the Show button\n 5. The secret key can now be copied and used during the Armis Device connector configuration""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Armis Device Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Armis Device Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. 
In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Armis Device Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. 
Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Create a Keyvault**\n\n Follow these instructions to create a new Keyvault.\n 1. In the Azure portal, Go to **Key vaults**. Click create.\n 2. Select Subscription, Resource Group and provide unique name of keyvault.\n\n> **NOTE:** Create a separate key vault for each **API key** within one workspace.""}, {""title"": """", ""description"": ""**STEP 7 - Create Access Policy in Keyvault**\n\n Follow these instructions to create access policy in Keyvault.\n 1. Go to keyvaults, select your keyvault, go to Access policies on left side panel. Click create.\n 2. Select all keys & secrets permissions. Click next.\n 3. In the principal section, search by application name which was generated in STEP - 2. Click next.\n\n> **NOTE:** Ensure the Permission model in the Access Configuration of Key Vault is set to **'Vault access policy'**""}, {""title"": """", ""description"": ""**STEP 8 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Armis Device data connector, have the Armis API Authorization Key(s) readily available.""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Armis connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ArmisDevice-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-ArmisDevice-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tFunction Name \n\t\tLocation \n\t\tWorkspace Name \n\t\tArmis Secret Key \n\t\tArmis URL (https://.armis.com/api/v1/) \n\t\tArmis Device Table Name \n\t\tArmis Schedule \n\t\tKeyVault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tAzure Entra ObjectID \n\t\tTenant Id \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Armis Device data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-ArmisDevice320-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. 
The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ARMISXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tLocation \n\t\tWorkspace Name \n\t\tArmis Secret Key \n\t\tArmis URL (https://.armis.com/api/v1/) \n\t\tArmis Device Table Name \n\t\tArmis Schedule \n\t\tKeyVault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tAzure Entra ObjectID \n\t\tTenant Id \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Armis Secret Key** is required. See the documentation to learn more about API on the `https://.armis.com/api/v1/doc`""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis/Data%20Connectors/ArmisDevice/ArmisDevice_API_FunctionApp.json","true" -"","Armorblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armorblox","armorblox1601081599926","armorblox_sentinel_1","2021-10-18","","","Armorblox","Partner","https://www.armorblox.com/contact/","","domains","","","","","","","false","","false" -"Armorblox_CL","Armorblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armorblox","armorblox1601081599926","armorblox_sentinel_1","2021-10-18","","","Armorblox","Partner","https://www.armorblox.com/contact/","","domains","Armorblox","Armorblox","Armorblox","The [Armorblox](https://www.armorblox.com/) data connector provides the capability to ingest incidents from your Armorblox instance into Microsoft Sentinel through the REST API. The connector provides ability to get events which helps to examine potential security risks, and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Armorblox API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Armorblox API**\n\n Follow the instructions to obtain the API token.\n\n1. Log in to the Armorblox portal with your credentials.\n2. In the portal, click **Settings**.\n3. In the **Settings** view, click **API Keys**\n4. Click **Create API Key**.\n5. Enter the required information.\n6. Click **Create**, and copy the API token displayed in the modal.\n7. Save API token for using in the data connector.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Armorblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Armorblox data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-armorblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. 
\n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **ArmorbloxAPIToken**, **ArmorbloxInstanceURL** OR **ArmorbloxInstanceName**, and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Armorblox data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-armorblox-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. 
The name you type is validated to make sure that it's unique in Azure Functions. (e.g. Armorblox).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tArmorbloxAPIToken\n\t\tArmorbloxInstanceName OR ArmorbloxInstanceURL\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tLogAnalyticsUri (optional)\n> - Use LogAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Armorblox Instance Details"", ""description"": ""**ArmorbloxInstanceName** OR **ArmorbloxInstanceURL** is required""}, {""name"": ""Armorblox API Credentials"", ""description"": ""**ArmorbloxAPIToken** is required""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armorblox/Data%20Connectors/Armorblox_API_FunctionApp.json","true" -"","Aruba ClearPass","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Aruba%20ClearPass","azuresentinel","azure-sentinel-solution-arubaclearpass","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"CommonSecurityLog","Aruba ClearPass","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Aruba%20ClearPass","azuresentinel","azure-sentinel-solution-arubaclearpass","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ArubaClearPass","Aruba Networks","[Deprecated] Aruba ClearPass via Legacy Agent","The [Aruba ClearPass](https://www.arubanetworks.com/products/security/network-access-control/secure-access/) connector allows you to easily connect your Aruba ClearPass with Microsoft Sentinel, to create custom dashboards, alerts, and improve investigation. 
This gives you more insight into your organization’s network and improves your security operation capabilities.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias ArubaClearPass and load the function code or click [here](https://aka.ms/sentinel-arubaclearpass-parser).The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. 
You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Aruba ClearPass logs to a Syslog agent"", ""description"": ""Configure Aruba ClearPass to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent.\n1. [Follow these instructions](https://www.arubanetworks.com/techdocs/ClearPass/6.7/PolicyManager/Content/CPPM_UserGuide/Admin/syslogExportFilters_add_syslog_filter_general.htm) to configure the Aruba ClearPass to forward syslog.\n2. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Aruba%20ClearPass/Data%20Connectors/Connector_Syslog_ArubaClearPass.json","true" -"CommonSecurityLog","Aruba ClearPass","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Aruba%20ClearPass","azuresentinel","azure-sentinel-solution-arubaclearpass","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ArubaClearPassAma","Aruba Networks","[Deprecated] Aruba ClearPass via AMA","The [Aruba ClearPass](https://www.arubanetworks.com/products/security/network-access-control/secure-access/) connector allows you to easily connect your Aruba ClearPass with Microsoft Sentinel, to create custom dashboards, alerts, and improve investigation. This gives you more insight into your organization’s network and improves your security operation capabilities.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias ArubaClearPass and load the function code or click [here](https://aka.ms/sentinel-arubaclearpass-parser).The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward Aruba ClearPass logs to a Syslog agent"", ""description"": ""Configure Aruba ClearPass to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent.\n1. [Follow these instructions](https://www.arubanetworks.com/techdocs/ClearPass/6.7/PolicyManager/Content/CPPM_UserGuide/Admin/syslogExportFilters_add_syslog_filter_general.htm) to configure the Aruba ClearPass to forward syslog.\n2. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address."", ""instructions"": []}, {""title"": ""Step C. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Aruba%20ClearPass/Data%20Connectors/template_ArubaClearPassAMA.json","true" -"","AtlassianConfluenceAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianConfluenceAudit","azuresentinel","azure-sentinel-solution-atlassianconfluenceaudit","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"AtlassianConfluenceNativePoller_CL","AtlassianConfluenceAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianConfluenceAudit","azuresentinel","azure-sentinel-solution-atlassianconfluenceaudit","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AtlassianConfluence","Atlassian","Atlassian Confluence","The Atlassian Confluence data connector provides the capability to ingest [Atlassian Confluence audit logs](https://developer.atlassian.com/cloud/confluence/rest/api-group-audit/) into Microsoft Sentinel.","[{""title"": ""Connect Atlassian Confluence"", ""description"": ""Please insert your credentials"", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Domain Name"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{domain}}"", ""placeHolderValue"": """"}]}, ""type"": ""BasicAuth""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", 
""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Atlassian Confluence API credentials"", ""description"": ""Confluence Username and Confluence Access Token are required. [See the documentation to learn more about Atlassian Confluence API](https://developer.atlassian.com/cloud/confluence/rest/intro/). Confluence domain must be provided as well.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianConfluenceAudit/Data%20Connectors/ConfluenceNativePollerConnector/azuredeploy_Confluence_native_poller_connector.json","true" -"Confluence_Audit_CL","AtlassianConfluenceAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianConfluenceAudit","azuresentinel","azure-sentinel-solution-atlassianconfluenceaudit","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ConfluenceAuditAPI","Atlassian","[Deprecated] Atlassian Confluence Audit","The [Atlassian Confluence](https://www.atlassian.com/software/confluence) Audit data connector provides the capability to ingest [Confluence Audit Records](https://support.atlassian.com/confluence-cloud/docs/view-the-audit-log/) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Confluence REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Confluence API**\n\n [Follow the instructions](https://developer.atlassian.com/cloud/confluence/rest/intro/#auth) to obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Confluence Audit data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-confluenceaudit-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-confluenceaudit-azuredeploy-gov)\n2. 
Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **ConfluenceAccessToken**, **ConfluenceUsername**, **ConfluenceHomeSiteName** (short site name part, as example HOMESITENAME from https://community.atlassian.com) and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Confluence Audit data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-confluenceauditapi-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. 
Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ConflAuditXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tConfluenceUsername\n\t\tConfluenceAccessToken\n\t\tConfluenceHomeSiteName\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**ConfluenceAccessToken**, **ConfluenceUsername** is required for REST API. [See the documentation to learn more about API](https://developer.atlassian.com/cloud/confluence/rest/api-group-audit/). 
Check all [requirements and follow the instructions](https://developer.atlassian.com/cloud/confluence/rest/intro/#auth) for obtaining credentials.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianConfluenceAudit/Data%20Connectors/AtlassianConfluenceAuditDataConnector/ConfluenceAudit_API_FunctionApp.json","true" -"ConfluenceAuditLogs_CL","AtlassianConfluenceAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianConfluenceAudit","azuresentinel","azure-sentinel-solution-atlassianconfluenceaudit","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ConfluenceAuditCCPDefinition","Microsoft"," Atlassian Confluence Audit (via Codeless Connector Framework)","The [Atlassian Confluence](https://www.atlassian.com/software/confluence) Audit data connector provides the capability to ingest [Confluence Audit Records](https://support.atlassian.com/confluence-cloud/docs/view-the-audit-log/) events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://support.atlassian.com/confluence-cloud/docs/view-the-audit-log/) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""description"": ""To enable the Atlassian Confluence connector for Microsoft Sentinel, click to add an organization, fill the form with the Confluence environment credentials and click to Connect. 
\n Follow [these steps](https://support.atlassian.com/atlassian-account/docs/manage-api-tokens-for-your-atlassian-account/) to create an API token.\n "", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Atlassian Confluence organization URL"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add organization"", ""title"": ""Add organization"", ""subtitle"": ""Add Atlassian Confluence organization"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Atlassian Confluence organization URL"", ""placeholder"": "".atlassian.net"", ""type"": ""string"", ""name"": ""confluenceorganizationurl""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""User Name"", ""placeholder"": ""User Name (e.g., user@example.com)"", ""type"": ""securestring"", ""name"": ""userid""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""apikey""}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Atlassian Confluence API access"", ""description"": ""Permission of [Administer Confluence](https://developer.atlassian.com/cloud/confluence/rest/v1/intro/#auth) is required to get access to the Confluence Audit logs API. 
See [Confluence API documentation](https://developer.atlassian.com/cloud/confluence/rest/v1/api-group-audit/#api-wiki-rest-api-audit-get) to learn more about the audit API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianConfluenceAudit/Data%20Connectors/AtlassianConfluenceAuditLogs_CCP/AtlassianConfluenceAudit_DataConnectorDefinition.json","true" -"","AtlassianJiraAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianJiraAudit","azuresentinel","azure-sentinel-solution-atlassianjiraaudit","2022-01-10","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"Jira_Audit_CL","AtlassianJiraAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianJiraAudit","azuresentinel","azure-sentinel-solution-atlassianjiraaudit","2022-01-10","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","JiraAuditAPI","Atlassian","Atlassian Jira Audit","The [Atlassian Jira](https://www.atlassian.com/software/jira) Audit data connector provides the capability to ingest [Jira Audit Records](https://support.atlassian.com/jira-cloud-administration/docs/audit-activities-in-jira-applications/) events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-audit-records/) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Jira REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-jiraauditapi-parser) to create the Kusto functions alias, **JiraAudit**""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Jira API**\n\n [Follow the instructions](https://developer.atlassian.com/cloud/jira/platform/rest/v3/intro/#authentication) to obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Jira Audit data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentineljiraauditazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentineljiraauditazuredeploy-gov)\n2. 
Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **JiraAccessToken**, **JiraUsername**, **JiraHomeSiteName** (short site name part, as example HOMESITENAME from https://community.atlassian.com) and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Jira Audit data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-jiraauditapi-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. 
**Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. JiraAuditXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tJiraUsername\n\t\tJiraAccessToken\n\t\tJiraHomeSiteName\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**JiraAccessToken**, **JiraUsername** is required for REST API. [See the documentation to learn more about API](https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-audit-records/). Check all [requirements and follow the instructions](https://developer.atlassian.com/cloud/jira/platform/rest/v3/intro/#authentication) for obtaining credentials.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianJiraAudit/Data%20Connectors/JiraAudit_API_FunctionApp.json","true" -"Jira_Audit_v2_CL","AtlassianJiraAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianJiraAudit","azuresentinel","azure-sentinel-solution-atlassianjiraaudit","2022-01-10","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","JiraAuditAPI","Atlassian","Atlassian Jira Audit","The [Atlassian Jira](https://www.atlassian.com/software/jira) Audit data connector provides the capability to ingest [Jira Audit Records](https://support.atlassian.com/jira-cloud-administration/docs/audit-activities-in-jira-applications/) events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-audit-records/) for more information. 
The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Jira REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-jiraauditapi-parser) to create the Kusto functions alias, **JiraAudit**""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Jira API**\n\n [Follow the instructions](https://developer.atlassian.com/cloud/jira/platform/rest/v3/intro/#authentication) to obtain the credentials. 
\n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Jira Audit data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentineljiraauditazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentineljiraauditazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **JiraAccessToken**, **JiraUsername**, **JiraHomeSiteName** (short site name part, as example HOMESITENAME from https://community.atlassian.com) and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Jira Audit data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. 
Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-jiraauditapi-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. JiraAuditXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. 
In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tJiraUsername\n\t\tJiraAccessToken\n\t\tJiraHomeSiteName\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**JiraAccessToken**, **JiraUsername** is required for REST API. [See the documentation to learn more about API](https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-audit-records/). 
Check all [requirements and follow the instructions](https://developer.atlassian.com/cloud/jira/platform/rest/v3/intro/#authentication) for obtaining credentials.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianJiraAudit/Data%20Connectors/JiraAudit_API_FunctionApp.json","true" -"Jira_Audit_v2_CL","AtlassianJiraAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianJiraAudit","azuresentinel","azure-sentinel-solution-atlassianjiraaudit","2022-01-10","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","JiraAuditCCPDefinition","Microsoft","Atlassian Jira Audit (using REST API)","The [Atlassian Jira](https://www.atlassian.com/software/jira) Audit data connector provides the capability to ingest [Jira Audit Records](https://support.atlassian.com/jira-cloud-administration/docs/audit-activities-in-jira-applications/) events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-audit-records/) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""description"": ""To enable the Atlassian Jira connector for Microsoft Sentinel, click to add an organization, fill the form with the Jira environment credentials and click to Connect. 
\n Follow [these steps](https://support.atlassian.com/atlassian-account/docs/manage-api-tokens-for-your-atlassian-account/) to create an API token.\n "", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Atlassian Jira organization URL"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add organization"", ""title"": ""Add organization"", ""subtitle"": ""Add Atlassian Jira organization"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Atlassian Jira organization URL"", ""placeholder"": ""Atlassian Jira organization URL"", ""type"": ""string"", ""name"": ""jiraorganizationurl""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""User Name"", ""placeholder"": ""User Name (e.g., user@example.com)"", ""type"": ""securestring"", ""name"": ""userid""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""API Key"", ""type"": ""password"", ""name"": ""apikey""}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Atlassian Jira API access"", ""description"": ""Permission of [Administer Jira](https://developer.atlassian.com/cloud/jira/platform/rest/v3/intro/#authentication) is required to get access to the Jira Audit logs API. 
See [Jira API documentation](https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-audit-records/#api-group-audit-records) to learn more about the audit API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianJiraAudit/Data%20Connectors/JiraAuditAPISentinelConnector_ccpv2/JiraAudit_DataConnectorDefinition.json","true" -"","Attacker Tools Threat Protection Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Attacker%20Tools%20Threat%20Protection%20Essentials","azuresentinel","azure-sentinel-solution-attackertools","2022-11-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","Australian Cyber Security Centre","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Australian%20Cyber%20Security%20Centre","azuresentinel","azure-sentinel-solution-australiancybersecurity","2022-11-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","Auth0","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Auth0","azuresentinel","azure-sentinel-solution-auth0","2022-08-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"Auth0AM_CL","Auth0","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Auth0","azuresentinel","azure-sentinel-solution-auth0","2022-08-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Auth0","Auth0","Auth0 Access Management","The [Auth0 Access Management](https://auth0.com/access-management) data connector provides the capability to ingest [Auth0 log events](https://auth0.com/docs/api/management/v2/#!/Logs/get_logs) into Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Auth0 Management APIs to pull its logs into 
Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Auth0 Management API**\n\n Follow the instructions to obtain the credentials.\n\n1. In Auth0 Dashboard, go to **Applications > Applications**.\n2. Select your Application. This should be a \""Machine-to-Machine\"" Application configured with at least **read:logs** and **read:logs_users** permissions.\n3. Copy **Domain, ClientID, Client Secret**""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Auth0 Access Management data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Auth0 Access Management data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-Auth0AccessManagement-azuredeploy)\n2. 
Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the ****Domain, ClientID, Client Secret****, **AzureSentinelWorkspaceId**, **AzureSentinelSharedKey**. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Auth0 Access Management data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-Auth0AccessManagement-azuredeploy) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. 
**Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. Auth0AMXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tDOMAIN\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**API token** is required. [See the documentation to learn more about API token](https://auth0.com/docs/secure/tokens/access-tokens/get-management-api-access-tokens-for-production)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Auth0/Data%20Connectors/Auth0_FunctionApp.json","true" -"Auth0Logs_CL","Auth0","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Auth0","azuresentinel","azure-sentinel-solution-auth0","2022-08-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Auth0ConnectorCCPDefinition","Microsoft","Auth0 Logs","The [Auth0](https://auth0.com/docs/api/management/v2/logs/get-logs) data connector allows ingesting logs from Auth0 API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses Auth0 API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### STEP 1 - Configuration steps for the Auth0 Management API""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Follow the instructions to obtain the credentials. \n 1. In Auth0 Dashboard, go to [**Applications > Applications**]\n 2. Select your Application. This should be a [**Machine-to-Machine**] Application configured with at least [**read:logs**] and [**read:logs_users**] permissions. \n 3. Copy [**Domain, ClientID, Client Secret**]""}}, {""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://example.auth0.com"", ""type"": ""text"", ""name"": ""Domain""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""Client ID"", ""placeholder"": ""Client ID"", ""type"": ""text"", ""name"": ""ClientId""}, ""type"": ""Textbox""}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client Secret"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""ClientSecret""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Auth0/Data%20Connectors/Auth0_CCP/DataConnectorDefinition.json","true" 
-"","Authomize","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Authomize","authomize","azure-sentinel-solution-authomize","2023-06-15","","","Authomize","Partner","https://support.authomize.com","","domains,verticals","","","","","","","false","","false" -"Authomize_v2_CL","Authomize","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Authomize","authomize","azure-sentinel-solution-authomize","2023-06-15","","","Authomize","Partner","https://support.authomize.com","","domains,verticals","Authomize","Authomize","Authomize Data Connector","The Authomize Data Connector provides the capability to ingest custom log types from Authomize into Microsoft Sentinel.","[{""title"": ""1. Locate your Authomize API key"", ""description"": ""Follow the setup instructions [located under Data Connectors for Authomize](https://github.com/authomize/Open-ITDR/blob/main/Open-Connectors/Platform/Azure-Sentinel/Data%20Connectors/readme.md).""}, {""title"": ""2. Deploy the Authomize data connector using the setup instructions."", ""description"": ""Follow the Instructions on [deploying the data connector to ingest data from Authomize](https://github.com/authomize/Open-ITDR/blob/main/Open-Connectors/Platform/Azure-Sentinel/Data%20Connectors/readme.md).""}, {""title"": ""3. Finalize your setup"", ""description"": ""Validate that your script is running. 
Simple instructions are located under the [Authomize Data Connector area](https://github.com/authomize/Open-ITDR/blob/main/Open-Connectors/Platform/Azure-Sentinel/Data%20Connectors/readme.md).""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Include custom pre-requisites if the connectivity requires - else delete customs"", ""description"": ""Description for any custom pre-requisite""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Authomize/Data%20Connectors/AuthomizeCustomConnector.json","true" -"","Azure Activity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Activity","azuresentinel","azure-sentinel-solution-azureactivity","2022-04-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"AzureActivity","Azure Activity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Activity","azuresentinel","azure-sentinel-solution-azureactivity","2022-04-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureActivity","Microsoft","Azure Activity","Azure Activity Log is a subscription log that provides insight into subscription-level events that occur in Azure, including events 
from Azure Resource Manager operational data, service health events, write operations taken on the resources in your subscription, and the status of activities performed in Azure. For more information, see the [Microsoft Sentinel documentation ](https://go.microsoft.com/fwlink/p/?linkid=2219695&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""instructions"": [{""parameters"": {""text"": ""This connector has been updated to use the diagnostics settings back-end pipeline. which provides increased functionality and better consistency with resource logs.\nConnectors using this pipeline can also be governed at scale by Azure Policy. Learn more about the new Azure Activity connector.\nFollow the instructions below to upgrade your connector to the diagnostics settings pipeline."", ""visible"": true, ""inline"": false}, ""type"": ""InfoMessage""}]}, {""title"": ""1. Disconnect your subscriptions from the legacy method"", ""description"": ""The subscriptions listed below are still using the older, legacy method. You are strongly encouraged to upgrade to the new pipeline.
\nTo do this, click on the 'Disconnect All' button below, before proceeding to launch the Azure Policy Assignment wizard."", ""instructions"": [{""parameters"": {""datasourceName"": ""AzureActivityLog""}, ""type"": ""OmsDatasource""}]}, {""title"": ""2. Connect your subscriptions through diagnostic settings new pipeline"", ""description"": ""This connector uses Azure Policy to apply a single Azure Subscription log-streaming configuration to a collection of subscriptions, defined as a scope.\nFollow the instructions below to create and apply a policy to all current and future subscriptions. **Note**, you may already have an active policy for this resource type."", ""innerSteps"": [{""title"": ""Launch the Azure Policy Assignment wizard and follow the steps.\u200b"", ""description"": "">1. In the **Basics** tab, click the button with the three dots under **Scope** to select your resources assignment scope.\n >2. In the **Parameters** tab, choose your Microsoft Sentinel workspace from the **Log Analytics workspace** drop-down list, and leave marked as \""True\"" all the log and metric types you want to ingest.\n >3. 
To apply the policy on your existing resources, select the **Remediation tab** and mark the **Create a remediation task** checkbox."", ""instructions"": [{""parameters"": {""linkType"": ""OpenPolicyAssignment"", ""policyDefinitionGuid"": ""2465583e-4e78-4c15-b6be-a36cbc7c8b0f"", ""assignMode"": 1}, ""type"": ""InstallAgent""}]}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Policy\u200b"", ""description"": ""owner role assigned for each policy assignment scope.\u200b""}, {""name"": ""Subscription"", ""description"": ""owner role permission on the relevant subscription""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Activity/Data%20Connectors/AzureActivity.json","true" -"","Azure Batch Account","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Batch%20Account","azuresentinel","azure-sentinel-solution-batchaccount","2022-06-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"AzureDiagnostics","Azure Batch Account","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Batch%20Account","azuresentinel","azure-sentinel-solution-batchaccount","2022-06-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AzureBatchAccount_CCP","Microsoft","Azure Batch Account","Azure Batch Account is a uniquely identified entity within the Batch service. Most Batch solutions use Azure Storage for storing resource files and output files, so each Batch account is usually associated with a corresponding storage account. 
This connector lets you stream your Azure Batch account diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2224103&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect your Azure Batch Account diagnostics logs into Sentinel."", ""description"": ""This connector uses Azure Policy to apply a single Azure Batch Account log-streaming configuration to a collection of instances, defined as a scope. Follow the instructions below to create and apply a policy to all current and future instances. Note, you may already have an active policy for this resource type."", ""instructions"": [{""parameters"": {""noFxPadding"": ""1"", ""expanded"": ""1"", ""instructionSteps"": [{""title"": ""Stream diagnostics logs from your Azure Batch Account at scale"", ""innerSteps"": [{""title"": ""Launch the Azure Policy Assignment wizard and follow the steps. "", ""description"": ""> 1. In the **Basics** tab, click the button with the three dots under **Scope** to select your subscription.
2. In the **Parameters** tab, choose your Microsoft Sentinel workspace from the **Log Analytics workspace** drop-down list, and leave marked as \""True\"" all the log categories you want to ingest.
3. To apply the policy on your existing resources, mark the **Create a remediation task** check box in the **Remediation** tab."", ""instructions"": [{""parameters"": {""policyDefinitionGuid"": ""c84e5349-db6d-4769-805e-e14037dab9b5""}, ""type"": ""PolicyAssignment""}]}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Policy"", ""description"": ""owner role assigned for each policy assignment scope""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Batch%20Account/Data%20Connectors/AzureBatchAccount_CCP.JSON","true" -"","Azure Cloud NGFW by Palo Alto Networks","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Cloud%20NGFW%20by%20Palo%20Alto%20Networks","paloaltonetworks","cloudngfw-sentinel-solution","2023-11-03","2023-11-03","","Palo Alto Networks","Partner","https://support.paloaltonetworks.com","","domains","","","","","","","false","","false" -"fluentbit_CL","Azure Cloud NGFW by Palo Alto Networks","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Cloud%20NGFW%20by%20Palo%20Alto%20Networks","paloaltonetworks","cloudngfw-sentinel-solution","2023-11-03","2023-11-03","","Palo Alto Networks","Partner","https://support.paloaltonetworks.com","","domains","AzureCloudNGFWByPaloAltoNetworks","Palo Alto Networks","Azure CloudNGFW By Palo Alto Networks","Cloud Next-Generation Firewall by Palo Alto Networks - an Azure Native ISV Service - is Palo Alto Networks Next-Generation Firewall (NGFW) delivered as a cloud-native service on Azure. You can discover Cloud NGFW in the Azure Marketplace and consume it in your Azure Virtual Networks (VNet). 
With Cloud NGFW, you can access the core NGFW capabilities such as App-ID, URL filtering based technologies. It provides threat prevention and detection through cloud-delivered security services and threat prevention signatures. The connector allows you to easily connect your Cloud NGFW logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities. For more information, see the [Cloud NGFW for Azure documentation](https://docs.paloaltonetworks.com/cloud-ngfw/azure).","[{""title"": ""Connect Cloud NGFW by Palo Alto Networks to Microsoft Sentinel"", ""description"": ""Enable Log Settings on All Cloud NGFWs by Palo Alto Networks."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCloudNGFW""}, ""type"": ""ConfigureLogSettings""}]}, {""title"": """", ""description"": ""Inside your Cloud NGFW resource:\n\n1. Navigate to the **Log Settings** from the homepage.\n2. Ensure the **Enable Log Settings** checkbox is checked.\n3. From the **Log Settings** drop-down, choose the desired Log Analytics Workspace.\n4. Confirm your selections and configurations.\n5. Click **Save** to apply the settings.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Cloud%20NGFW%20by%20Palo%20Alto%20Networks/Data%20Connectors/CloudNgfwByPAN.json","true" -"","Azure Cognitive Search","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Cognitive%20Search","azuresentinel","azure-sentinel-solution-azurecognitivesearch","2022-06-28","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"AzureDiagnostics","Azure Cognitive Search","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Cognitive%20Search","azuresentinel","azure-sentinel-solution-azurecognitivesearch","2022-06-28","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AzureCognitiveSearch_CCP","Microsoft","Azure Cognitive Search","Azure Cognitive Search is a cloud search service that gives developers infrastructure, APIs, and tools for building a rich search experience over private, heterogeneous content in web, mobile, and enterprise applications. This connector lets you stream your Azure Cognitive Search diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity. ","[{""title"": ""Connect your Azure Cognitive Search diagnostics logs into Sentinel."", ""description"": ""This connector uses Azure Policy to apply a single Azure Cognitive Search log-streaming configuration to a collection of instances, defined as a scope. Follow the instructions below to create and apply a policy to all current and future instances. 
Note, you may already have an active policy for this resource type."", ""instructions"": [{""parameters"": {""noFxPadding"": ""1"", ""expanded"": ""1"", ""instructionSteps"": [{""title"": ""Stream diagnostics logs from your Azure Cognitive Search at scale"", ""innerSteps"": [{""title"": ""Launch the Azure Policy Assignment wizard and follow the steps. "", ""description"": ""> 1. In the **Basics** tab, click the button with the three dots under **Scope** to select your subscription.
2. In the **Parameters** tab, choose your Microsoft Sentinel workspace from the **Log Analytics workspace** drop-down list, and leave marked as \""True\"" all the log categories you want to ingest.
3. To apply the policy on your existing resources, mark the **Create a remediation task** check box in the **Remediation** tab."", ""instructions"": [{""parameters"": {""policyDefinitionGuid"": ""08ba64b8-738f-4918-9686-730d2ed79c7d""}, ""type"": ""PolicyAssignment""}]}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Policy"", ""description"": ""owner role assigned for each policy assignment scope""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Cognitive%20Search/Data%20Connectors/AzureCognitiveSearch_CCP.JSON","true" -"","Azure DDoS Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20DDoS%20Protection","azuresentinel","azure-sentinel-solution-azureddosprotection","2022-05-13","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"AzureDiagnostics","Azure DDoS Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20DDoS%20Protection","azuresentinel","azure-sentinel-solution-azureddosprotection","2022-05-13","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","DDOS","Microsoft","Azure DDoS Protection","Connect to Azure DDoS Protection Standard logs via Public IP Address Diagnostic Logs. In addition to the core DDoS protection in the platform, Azure DDoS Protection Standard provides advanced DDoS mitigation capabilities against network attacks. It's automatically tuned to protect your specific Azure resources. Protection is simple to enable during the creation of new virtual networks. 
It can also be done after creation and requires no application or resource changes. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219760&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect Azure DDoS Protection to Microsoft Sentinel"", ""description"": ""Enable Diagnostic Logs on All Public IP Addresses."", ""instructions"": [{""parameters"": {""linkType"": ""OpenMicrosoftAzureMonitoring""}, ""type"": ""InstallAgent""}]}, {""title"": ""Inside your Diagnostics settings portal, select your Public IP Address resource:"", ""description"": ""Inside your Public IP Address resource:\n \n1. Select **+ Add diagnostic setting.\u200b**\n2. In the **Diagnostic setting** blade:\n - Type a **Name**, within the **Diagnostics settings** name field.\n - Select **Send to Log Analytics**.\n - Choose the log destination workspace.\n - Select the categories that you want to analyze (recommended: DDoSProtectionNotifications, DDoSMitigationFlowLogs, DDoSMitigationReports)\n - Click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Azure DDoS protection plan"", ""description"": ""A configured Azure DDoS Standard protection plan [read more about Azure DDoS protection plans](https://docs.microsoft.com/azure/virtual-network/manage-ddos-protection#create-a-ddos-protection-plan).""}, {""name"": ""Enabled Azure DDoS for virtual network"", ""description"": ""A configured virtual network with Azure DDoS Standard enabled [read more about configuring virtual network with Azure 
DDoS](https://docs.microsoft.com/azure/virtual-network/manage-ddos-protection#enable-ddos-for-an-existing-virtual-network).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20DDoS%20Protection/Data%20Connectors/DDOS.JSON","true" -"","Azure Data Lake Storage Gen1","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Data%20Lake%20Storage%20Gen1","azuresentinel","azure-sentinel-solution-datalakestoragegen1","2022-06-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"AzureDiagnostics","Azure Data Lake Storage Gen1","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Data%20Lake%20Storage%20Gen1","azuresentinel","azure-sentinel-solution-datalakestoragegen1","2022-06-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureDataLakeStorageGen1_CCP","Microsoft","Azure Data Lake Storage Gen1","Azure Data Lake Storage Gen1 is an enterprise-wide hyper-scale repository for big data analytic workloads. Azure Data Lake enables you to capture data of any size, type, and ingestion speed in one single place for operational and exploratory analytics. This connector lets you stream your Azure Data Lake Storage Gen1 diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2223812&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect your Azure Data Lake Storage Gen1 diagnostics logs into Sentinel."", ""description"": ""This connector uses Azure Policy to apply a single Azure Data Lake Storage Gen1 log-streaming configuration to a collection of instances, defined as a scope. Follow the instructions below to create and apply a policy to all current and future instances. 
Note, you may already have an active policy for this resource type."", ""instructions"": [{""parameters"": {""noFxPadding"": ""1"", ""expanded"": ""1"", ""instructionSteps"": [{""title"": ""Stream diagnostics logs from your Azure Data Lake Storage Gen1 at scale"", ""innerSteps"": [{""title"": ""Launch the Azure Policy Assignment wizard and follow the steps. "", ""description"": ""> 1. In the **Basics** tab, click the button with the three dots under **Scope** to select your subscription.
2. In the **Parameters** tab, choose your Microsoft Sentinel workspace from the **Log Analytics workspace** drop-down list, and leave marked as \""True\"" all the log categories you want to ingest.
3. To apply the policy on your existing resources, mark the **Create a remediation task** check box in the **Remediation** tab."", ""instructions"": [{""parameters"": {""policyDefinitionGuid"": ""25763a0a-5783-4f14-969e-79d4933eb74b""}, ""type"": ""PolicyAssignment""}]}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Policy"", ""description"": ""owner role assigned for each policy assignment scope""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Data%20Lake%20Storage%20Gen1/Data%20Connectors/AzureDataLakeStorageGen1_CCP.JSON","true" -"","Azure Event Hubs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Event%20Hubs","azuresentinel","azure-sentinel-solution-eventhub","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"AzureDiagnostics","Azure Event Hubs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Event%20Hubs","azuresentinel","azure-sentinel-solution-eventhub","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AzureEventHub_CCP","Microsoft","Azure Event Hub","Azure Event Hubs is a big data streaming platform and event ingestion service. It can receive and process millions of events per second. This connector lets you stream your Azure Event Hub diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity. 
","[{""title"": ""Connect your Azure Event Hub diagnostics logs into Sentinel."", ""description"": ""This connector uses Azure Policy to apply a single Azure Event Hub log-streaming configuration to a collection of instances, defined as a scope. Follow the instructions below to create and apply a policy to all current and future instances. Note, you may already have an active policy for this resource type."", ""instructions"": [{""parameters"": {""noFxPadding"": ""1"", ""expanded"": ""1"", ""instructionSteps"": [{""title"": ""Stream diagnostics logs from your Azure Event Hub at scale"", ""innerSteps"": [{""title"": ""Launch the Azure Policy Assignment wizard and follow the steps. "", ""description"": ""> 1. In the **Basics** tab, click the button with the three dots under **Scope** to select your subscription.
2. In the **Parameters** tab, choose your Microsoft Sentinel workspace from the **Log Analytics workspace** drop-down list, and leave marked as \""True\"" all the log categories you want to ingest.
3. To apply the policy on your existing resources, mark the **Create a remediation task** check box in the **Remediation** tab."", ""instructions"": [{""parameters"": {""policyDefinitionGuid"": ""1f6e93e8-6b31-41b1-83f6-36e449a42579""}, ""type"": ""PolicyAssignment""}]}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Policy"", ""description"": ""owner role assigned for each policy assignment scope""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Event%20Hubs/Data%20Connectors/AzureEventHub_CCP.JSON","true" -"","Azure Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall","sentinel4azurefirewall","sentinel4azurefirewall","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"AZFWApplicationRule","Azure Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall","sentinel4azurefirewall","sentinel4azurefirewall","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureFirewall","Microsoft","Azure Firewall","Connect to Azure Firewall. Azure Firewall is a managed, cloud-based network security service that protects your Azure Virtual Network resources. It's a fully stateful firewall as a service with built-in high availability and unrestricted cloud scalability. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220124&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect Azure Firewall to Microsoft Sentinel"", ""description"": ""Enable Diagnostic Logs on All Firewalls."", ""instructions"": [{""parameters"": {""linkType"": ""OpenAzureFirewall""}, ""type"": ""InstallAgent""}]}, {""title"": """", ""description"": ""Inside your Firewall resource:\n\n1. Select **Diagnostic logs.\u200b**\n2. Select **+ Add diagnostic setting.\u200b**\n3. In the **Diagnostic setting** blade:\n - Type a **Name**.\n - Select **Send to Log Analytics**.\n - Choose the log destination workspace.\n - Select the categories that you want to analyze ( Azure Firewall Network Rule, Azure Firewall Application Rule,Azure Firewall Nat Rule,Azure Firewall Threat Intelligence,Azure Firewall IDPS Signature,Azure Firewall DNS query,Azure Firewall FQDN Resolution Failure,Azure Firewall Fat Flow Log,Azure Firewall Flow Trace Log)\n - Click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall/Data%20Connectors/AzureFirewall.JSON","true" -"AZFWDnsQuery","Azure Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall","sentinel4azurefirewall","sentinel4azurefirewall","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureFirewall","Microsoft","Azure Firewall","Connect to Azure Firewall. Azure Firewall is a managed, cloud-based network security service that protects your Azure Virtual Network resources. 
It's a fully stateful firewall as a service with built-in high availability and unrestricted cloud scalability. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220124&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect Azure Firewall to Microsoft Sentinel"", ""description"": ""Enable Diagnostic Logs on All Firewalls."", ""instructions"": [{""parameters"": {""linkType"": ""OpenAzureFirewall""}, ""type"": ""InstallAgent""}]}, {""title"": """", ""description"": ""Inside your Firewall resource:\n\n1. Select **Diagnostic logs.\u200b**\n2. Select **+ Add diagnostic setting.\u200b**\n3. In the **Diagnostic setting** blade:\n - Type a **Name**.\n - Select **Send to Log Analytics**.\n - Choose the log destination workspace.\n - Select the categories that you want to analyze ( Azure Firewall Network Rule, Azure Firewall Application Rule,Azure Firewall Nat Rule,Azure Firewall Threat Intelligence,Azure Firewall IDPS Signature,Azure Firewall DNS query,Azure Firewall FQDN Resolution Failure,Azure Firewall Fat Flow Log,Azure Firewall Flow Trace Log)\n - Click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall/Data%20Connectors/AzureFirewall.JSON","true" -"AZFWFatFlow","Azure Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall","sentinel4azurefirewall","sentinel4azurefirewall","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureFirewall","Microsoft","Azure Firewall","Connect to Azure Firewall. 
Azure Firewall is a managed, cloud-based network security service that protects your Azure Virtual Network resources. It's a fully stateful firewall as a service with built-in high availability and unrestricted cloud scalability. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220124&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect Azure Firewall to Microsoft Sentinel"", ""description"": ""Enable Diagnostic Logs on All Firewalls."", ""instructions"": [{""parameters"": {""linkType"": ""OpenAzureFirewall""}, ""type"": ""InstallAgent""}]}, {""title"": """", ""description"": ""Inside your Firewall resource:\n\n1. Select **Diagnostic logs.\u200b**\n2. Select **+ Add diagnostic setting.\u200b**\n3. In the **Diagnostic setting** blade:\n - Type a **Name**.\n - Select **Send to Log Analytics**.\n - Choose the log destination workspace.\n - Select the categories that you want to analyze ( Azure Firewall Network Rule, Azure Firewall Application Rule,Azure Firewall Nat Rule,Azure Firewall Threat Intelligence,Azure Firewall IDPS Signature,Azure Firewall DNS query,Azure Firewall FQDN Resolution Failure,Azure Firewall Fat Flow Log,Azure Firewall Flow Trace Log)\n - Click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall/Data%20Connectors/AzureFirewall.JSON","true" -"AZFWFlowTrace","Azure Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall","sentinel4azurefirewall","sentinel4azurefirewall","2022-05-23","","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureFirewall","Microsoft","Azure Firewall","Connect to Azure Firewall. Azure Firewall is a managed, cloud-based network security service that protects your Azure Virtual Network resources. It's a fully stateful firewall as a service with built-in high availability and unrestricted cloud scalability. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220124&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect Azure Firewall to Microsoft Sentinel"", ""description"": ""Enable Diagnostic Logs on All Firewalls."", ""instructions"": [{""parameters"": {""linkType"": ""OpenAzureFirewall""}, ""type"": ""InstallAgent""}]}, {""title"": """", ""description"": ""Inside your Firewall resource:\n\n1. Select **Diagnostic logs.\u200b**\n2. Select **+ Add diagnostic setting.\u200b**\n3. In the **Diagnostic setting** blade:\n - Type a **Name**.\n - Select **Send to Log Analytics**.\n - Choose the log destination workspace.\n - Select the categories that you want to analyze ( Azure Firewall Network Rule, Azure Firewall Application Rule,Azure Firewall Nat Rule,Azure Firewall Threat Intelligence,Azure Firewall IDPS Signature,Azure Firewall DNS query,Azure Firewall FQDN Resolution Failure,Azure Firewall Fat Flow Log,Azure Firewall Flow Trace Log)\n - Click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall/Data%20Connectors/AzureFirewall.JSON","true" -"AZFWIdpsSignature","Azure 
Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall","sentinel4azurefirewall","sentinel4azurefirewall","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureFirewall","Microsoft","Azure Firewall","Connect to Azure Firewall. Azure Firewall is a managed, cloud-based network security service that protects your Azure Virtual Network resources. It's a fully stateful firewall as a service with built-in high availability and unrestricted cloud scalability. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220124&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect Azure Firewall to Microsoft Sentinel"", ""description"": ""Enable Diagnostic Logs on All Firewalls."", ""instructions"": [{""parameters"": {""linkType"": ""OpenAzureFirewall""}, ""type"": ""InstallAgent""}]}, {""title"": """", ""description"": ""Inside your Firewall resource:\n\n1. Select **Diagnostic logs.\u200b**\n2. Select **+ Add diagnostic setting.\u200b**\n3. 
In the **Diagnostic setting** blade:\n - Type a **Name**.\n - Select **Send to Log Analytics**.\n - Choose the log destination workspace.\n - Select the categories that you want to analyze ( Azure Firewall Network Rule, Azure Firewall Application Rule,Azure Firewall Nat Rule,Azure Firewall Threat Intelligence,Azure Firewall IDPS Signature,Azure Firewall DNS query,Azure Firewall FQDN Resolution Failure,Azure Firewall Fat Flow Log,Azure Firewall Flow Trace Log)\n - Click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall/Data%20Connectors/AzureFirewall.JSON","true" -"AZFWInternalFqdnResolutionFailure","Azure Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall","sentinel4azurefirewall","sentinel4azurefirewall","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureFirewall","Microsoft","Azure Firewall","Connect to Azure Firewall. Azure Firewall is a managed, cloud-based network security service that protects your Azure Virtual Network resources. It's a fully stateful firewall as a service with built-in high availability and unrestricted cloud scalability. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220124&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect Azure Firewall to Microsoft Sentinel"", ""description"": ""Enable Diagnostic Logs on All Firewalls."", ""instructions"": [{""parameters"": {""linkType"": ""OpenAzureFirewall""}, ""type"": ""InstallAgent""}]}, {""title"": """", ""description"": ""Inside your Firewall resource:\n\n1. 
Select **Diagnostic logs.\u200b**\n2. Select **+ Add diagnostic setting.\u200b**\n3. In the **Diagnostic setting** blade:\n - Type a **Name**.\n - Select **Send to Log Analytics**.\n - Choose the log destination workspace.\n - Select the categories that you want to analyze ( Azure Firewall Network Rule, Azure Firewall Application Rule,Azure Firewall Nat Rule,Azure Firewall Threat Intelligence,Azure Firewall IDPS Signature,Azure Firewall DNS query,Azure Firewall FQDN Resolution Failure,Azure Firewall Fat Flow Log,Azure Firewall Flow Trace Log)\n - Click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall/Data%20Connectors/AzureFirewall.JSON","true" -"AZFWNatRule","Azure Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall","sentinel4azurefirewall","sentinel4azurefirewall","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureFirewall","Microsoft","Azure Firewall","Connect to Azure Firewall. Azure Firewall is a managed, cloud-based network security service that protects your Azure Virtual Network resources. It's a fully stateful firewall as a service with built-in high availability and unrestricted cloud scalability. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220124&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect Azure Firewall to Microsoft Sentinel"", ""description"": ""Enable Diagnostic Logs on All Firewalls."", ""instructions"": [{""parameters"": {""linkType"": ""OpenAzureFirewall""}, ""type"": ""InstallAgent""}]}, {""title"": """", ""description"": ""Inside your Firewall resource:\n\n1. Select **Diagnostic logs.\u200b**\n2. Select **+ Add diagnostic setting.\u200b**\n3. In the **Diagnostic setting** blade:\n - Type a **Name**.\n - Select **Send to Log Analytics**.\n - Choose the log destination workspace.\n - Select the categories that you want to analyze ( Azure Firewall Network Rule, Azure Firewall Application Rule,Azure Firewall Nat Rule,Azure Firewall Threat Intelligence,Azure Firewall IDPS Signature,Azure Firewall DNS query,Azure Firewall FQDN Resolution Failure,Azure Firewall Fat Flow Log,Azure Firewall Flow Trace Log)\n - Click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall/Data%20Connectors/AzureFirewall.JSON","true" -"AZFWNetworkRule","Azure Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall","sentinel4azurefirewall","sentinel4azurefirewall","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureFirewall","Microsoft","Azure Firewall","Connect to Azure Firewall. Azure Firewall is a managed, cloud-based network security service that protects your Azure Virtual Network resources. 
It's a fully stateful firewall as a service with built-in high availability and unrestricted cloud scalability. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220124&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect Azure Firewall to Microsoft Sentinel"", ""description"": ""Enable Diagnostic Logs on All Firewalls."", ""instructions"": [{""parameters"": {""linkType"": ""OpenAzureFirewall""}, ""type"": ""InstallAgent""}]}, {""title"": """", ""description"": ""Inside your Firewall resource:\n\n1. Select **Diagnostic logs.\u200b**\n2. Select **+ Add diagnostic setting.\u200b**\n3. In the **Diagnostic setting** blade:\n - Type a **Name**.\n - Select **Send to Log Analytics**.\n - Choose the log destination workspace.\n - Select the categories that you want to analyze ( Azure Firewall Network Rule, Azure Firewall Application Rule,Azure Firewall Nat Rule,Azure Firewall Threat Intelligence,Azure Firewall IDPS Signature,Azure Firewall DNS query,Azure Firewall FQDN Resolution Failure,Azure Firewall Fat Flow Log,Azure Firewall Flow Trace Log)\n - Click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall/Data%20Connectors/AzureFirewall.JSON","true" -"AZFWThreatIntel","Azure Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall","sentinel4azurefirewall","sentinel4azurefirewall","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureFirewall","Microsoft","Azure Firewall","Connect to Azure Firewall. 
Azure Firewall is a managed, cloud-based network security service that protects your Azure Virtual Network resources. It's a fully stateful firewall as a service with built-in high availability and unrestricted cloud scalability. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220124&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect Azure Firewall to Microsoft Sentinel"", ""description"": ""Enable Diagnostic Logs on All Firewalls."", ""instructions"": [{""parameters"": {""linkType"": ""OpenAzureFirewall""}, ""type"": ""InstallAgent""}]}, {""title"": """", ""description"": ""Inside your Firewall resource:\n\n1. Select **Diagnostic logs.\u200b**\n2. Select **+ Add diagnostic setting.\u200b**\n3. In the **Diagnostic setting** blade:\n - Type a **Name**.\n - Select **Send to Log Analytics**.\n - Choose the log destination workspace.\n - Select the categories that you want to analyze ( Azure Firewall Network Rule, Azure Firewall Application Rule,Azure Firewall Nat Rule,Azure Firewall Threat Intelligence,Azure Firewall IDPS Signature,Azure Firewall DNS query,Azure Firewall FQDN Resolution Failure,Azure Firewall Fat Flow Log,Azure Firewall Flow Trace Log)\n - Click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall/Data%20Connectors/AzureFirewall.JSON","true" -"AzureDiagnostics","Azure Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall","sentinel4azurefirewall","sentinel4azurefirewall","2022-05-23","","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureFirewall","Microsoft","Azure Firewall","Connect to Azure Firewall. Azure Firewall is a managed, cloud-based network security service that protects your Azure Virtual Network resources. It's a fully stateful firewall as a service with built-in high availability and unrestricted cloud scalability. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220124&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect Azure Firewall to Microsoft Sentinel"", ""description"": ""Enable Diagnostic Logs on All Firewalls."", ""instructions"": [{""parameters"": {""linkType"": ""OpenAzureFirewall""}, ""type"": ""InstallAgent""}]}, {""title"": """", ""description"": ""Inside your Firewall resource:\n\n1. Select **Diagnostic logs.\u200b**\n2. Select **+ Add diagnostic setting.\u200b**\n3. In the **Diagnostic setting** blade:\n - Type a **Name**.\n - Select **Send to Log Analytics**.\n - Choose the log destination workspace.\n - Select the categories that you want to analyze ( Azure Firewall Network Rule, Azure Firewall Application Rule,Azure Firewall Nat Rule,Azure Firewall Threat Intelligence,Azure Firewall IDPS Signature,Azure Firewall DNS query,Azure Firewall FQDN Resolution Failure,Azure Firewall Fat Flow Log,Azure Firewall Flow Trace Log)\n - Click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall/Data%20Connectors/AzureFirewall.JSON","true" -"","Azure Key 
Vault","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Key%20Vault","azuresentinel","azure-sentinel-solution-azurekeyvault","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"AzureDiagnostics","Azure Key Vault","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Key%20Vault","azuresentinel","azure-sentinel-solution-azurekeyvault","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AzureKeyVault","Microsoft","Azure Key Vault","Azure Key Vault is a cloud service for securely storing and accessing secrets. A secret is anything that you want to tightly control access to, such as API keys, passwords, certificates, or cryptographic keys. This connector lets you stream your Azure Key Vault diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity in all your instances. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220125&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect your Azure Key Vault diagnostics logs into Sentinel."", ""description"": ""This connector uses Azure Policy to apply a single Azure Key Vault log-streaming configuration to a collection of instances, defined as a scope. \nFollow the instructions below to create and apply a policy to all current and future instances. **Note**, you may already have an active policy for this resource type."", ""instructions"": [{""parameters"": {""noFxPadding"": true, ""expanded"": true, ""instructionSteps"": [{""title"": ""Stream diagnostics logs from your Azure Key Vault at scale"", ""description"": null, ""innerSteps"": [{""title"": ""Launch the Azure Policy Assignment wizard and follow the steps.\u200b"", ""description"": "">1. 
In the **Basics** tab, click the button with the three dots under **Scope** to select your resources assignment scope.\n >2. In the **Parameters** tab, choose your Microsoft Sentinel workspace from the **Log Analytics workspace** drop-down list, and leave marked as \""True\"" all the log and metric types you want to ingest.\n >3. To apply the policy on your existing resources, select the **Remediation tab** and mark the **Create a remediation task** checkbox."", ""instructions"": [{""parameters"": {""linkType"": ""OpenPolicyAssignment"", ""policyDefinitionGuid"": ""951af2fa-529b-416e-ab6e-066fd85ac459"", ""assignMode"": 1}, ""type"": ""InstallAgent""}]}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Policy\u200b"", ""description"": ""owner role assigned for each policy assignment scope.\u200b""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Key%20Vault/Data%20Connectors/AzureKeyVault.JSON","true" -"","Azure Logic Apps","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Logic%20Apps","azuresentinel","azure-sentinel-solution-logicapps","2022-06-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"AzureDiagnostics","Azure Logic Apps","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Logic%20Apps","azuresentinel","azure-sentinel-solution-logicapps","2022-06-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AzureLogicApps_CCP","Microsoft","Azure Logic Apps","Azure Logic Apps is a cloud-based platform for creating and running automated workflows that integrate your 
apps, data, services, and systems. This connector lets you stream your Azure Logic Apps diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity. ","[{""title"": ""Connect your Logic Apps diagnostics logs into Sentinel."", ""description"": ""This connector uses Azure Policy to apply a single Azure Logic Apps log-streaming configuration to a collection of instances, defined as a scope. Follow the instructions below to create and apply a policy to all current and future instances. Note, you may already have an active policy for this resource type."", ""instructions"": [{""parameters"": {""noFxPadding"": ""1"", ""expanded"": ""1"", ""instructionSteps"": [{""title"": ""Stream diagnostics logs from your Azure Logic Apps at scale"", ""innerSteps"": [{""title"": ""Launch the Azure Policy Assignment wizard and follow the steps. "", ""description"": ""> 1. In the **Basics** tab, click the button with the three dots under **Scope** to select your subscription.
2. In the **Parameters** tab, choose your Microsoft Sentinel workspace from the **Log Analytics workspace** drop-down list, and leave marked as \""True\"" all the log categories you want to ingest.
3. To apply the policy on your existing resources, mark the **Create a remediation task** check box in the **Remediation** tab."", ""instructions"": [{""parameters"": {""policyDefinitionGuid"": ""b889a06c-ec72-4b03-910a-cb169ee18721""}, ""type"": ""PolicyAssignment""}]}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Policy"", ""description"": ""owner role assigned for each policy assignment scope""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Logic%20Apps/Data%20Connectors/AzureLogicApps_CCP.JSON","true" -"","Azure Network Security Groups","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Network%20Security%20Groups","azuresentinel","azure-sentinel-solution-networksecuritygroup","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"AzureDiagnostics","Azure Network Security Groups","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Network%20Security%20Groups","azuresentinel","azure-sentinel-solution-networksecuritygroup","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AzureNSG","Microsoft","Network Security Groups","Azure network security groups (NSG) allow you to filter network traffic to and from Azure resources in an Azure virtual network. A network security group includes rules that allow or deny traffic to a virtual network subnet, network interface, or both.

When you enable logging for an NSG, you can gather the following types of resource log information:

- **Event:** Entries are logged for which NSG rules are applied to VMs, based on MAC address.
- **Rule counter:** Contains entries for how many times each NSG rule is applied to deny or allow traffic. The status for these rules is collected every 300 seconds.


This connector lets you stream your NSG diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity in all your instances. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2223718&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Stream diagnostics logs from your Connect your Network Security Groups diagnostics logs into Sentinel. at scale"", ""description"": ""This connector uses Azure Policy to apply a single Azure Network Security Groups log-streaming configuration to a collection of instances, defined as a scope. \nFollow the instructions below to create and apply a policy to all current and future instances. **Note**, you may already have an active policy for this resource type."", ""innerSteps"": [{""title"": ""Launch the Azure Policy Assignment wizard and follow the steps.\u200b"", ""description"": "">1. In the **Basics** tab, click the button with the three dots under **Scope** to select your resources assignment scope.\n >2. In the **Parameters** tab, choose your Microsoft Sentinel workspace from the **Log Analytics workspace** drop-down list, and leave marked as \""True\"" all the log and metric types you want to ingest.\n >3. 
To apply the policy on your existing resources, select the **Remediation tab** and mark the **Create a remediation task** checkbox."", ""instructions"": [{""parameters"": {""linkType"": ""OpenPolicyAssignment"", ""policyDefinitionGuid"": ""98a2e215-5382-489e-bd29-32e7190a39ba"", ""assignMode"": 1}, ""type"": ""InstallAgent""}]}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Policy\u200b"", ""description"": ""owner role assigned for each policy assignment scope.\u200b""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Network%20Security%20Groups/Data%20Connectors/AzureNSG.JSON","true" -"","Azure Resource Graph","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Resource%20Graph","azuresentinel","azure-sentinel-solution-resourcegraph","2025-06-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","Azure Resource Graph","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Resource%20Graph","azuresentinel","azure-sentinel-solution-resourcegraph","2025-06-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AzureResourceGraph","Microsoft","Azure Resource Graph","Azure Resource Graph connector gives richer insights into Azure events by supplementing details about Azure subscriptions and Azure resources.","[{""description"": ""Connect Azure Resource Graph to Microsoft Sentinel"", ""instructions"": [{""parameters"": {""description"": ""**Entities**"", ""items"": [{""label"": ""resources""}, {""label"": ""resourcecontainers""}, {""label"": ""authorizationresources""}]}, ""type"": ""MSG_test""}]}]","{""customs"": 
[{""name"": ""Policy"", ""description"": ""Owner role permission on Azure subscriptions""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Resource%20Graph/Data%20Connectors/AzureResourceGraph_DataConnectorDefinition.json","true" -"","Azure SQL Database solution for sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20SQL%20Database%20solution%20for%20sentinel","sentinel4sql","sentinel4sql","2022-08-19","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"AzureDiagnostics","Azure SQL Database solution for sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20SQL%20Database%20solution%20for%20sentinel","sentinel4sql","sentinel4sql","2022-08-19","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureSql","Microsoft","Azure SQL Databases","Azure SQL is a fully managed, Platform-as-a-Service (PaaS) database engine that handles most database management functions, such as upgrading, patching, backups, and monitoring, without necessitating user involvement. This connector lets you stream your Azure SQL databases audit and diagnostic logs into Microsoft Sentinel, allowing you to continuously monitor activity in all your instances.","[{""title"": ""Connect your Azure SQL databases diagnostics logs into Sentinel."", ""description"": ""This connector uses Azure Policy to apply a single Azure SQL Database log-streaming configuration to a collection of instances, defined as a scope. \nFollow the instructions below to create and apply a policy to all current and future instances. 
**Note**, you may already have an active policy for this resource type."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Stream diagnostics logs from your Azure SQL Databases at scale"", ""innerSteps"": [{""title"": ""Launch the Azure Policy Assignment wizard and follow the steps.\u200b"", ""description"": "">1. In the **Basics** tab, click the button with the three dots under **Scope** to select your resources assignment scope.\n >2. In the **Parameters** tab, choose your Microsoft Sentinel workspace from the **Log Analytics workspace** drop-down list, and leave marked as \""True\"" all the log and metric types you want to ingest.\n >3. To apply the policy on your existing resources, select the **Remediation tab** and mark the **Create a remediation task** checkbox."", ""instructions"": [{""parameters"": {""linkType"": ""OpenPolicyAssignment"", ""policyDefinitionGuid"": ""b79fa14e-238a-4c2d-b376-442ce508fc84"", ""assignMode"": 1}, ""type"": ""InstallAgent""}]}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""Stream audit logs from your Azure SQL Databases at the server level at scale"", ""innerSteps"": [{""title"": ""Launch the Azure Policy Assignment wizard and follow the steps.\u200b"", ""description"": "">1. In the **Basics** tab, click the button with the three dots under **Scope** to select your resources assignment scope.\n >2. In the **Parameters** tab, choose your Microsoft Sentinel workspace from the **Log Analytics workspace** drop-down list, and leave marked as \""True\"" all the log and metric types you want to ingest.\n >3. 
To apply the policy on your existing resources, select the **Remediation tab** and mark the **Create a remediation task** checkbox."", ""instructions"": [{""parameters"": {""linkType"": ""OpenPolicyAssignment"", ""policyDefinitionGuid"": ""7ea8a143-05e3-4553-abfe-f56bef8b0b70"", ""assignMode"": 1}, ""type"": ""InstallAgent""}]}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Policy\u200b"", ""description"": ""owner role assigned for each policy assignment scope.\u200b""}, {""name"": ""Auditing"", ""description"": ""read and write permissions to Azure SQL Server audit settings.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20SQL%20Database%20solution%20for%20sentinel/Data%20Connectors/template_AzureSql.JSON","true" -"","Azure Service Bus","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Service%20Bus","azuresentinel","azure-sentinel-solution-servicebus","2022-06-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"AzureDiagnostics","Azure Service Bus","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Service%20Bus","azuresentinel","azure-sentinel-solution-servicebus","2022-06-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AzureServiceBus_CCP","Microsoft","Azure Service Bus","Azure Service Bus is a fully managed enterprise message broker with message queues and publish-subscribe topics (in a namespace). This connector lets you stream your Azure Service Bus diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity. 
","[{""title"": ""Connect your Azure Service Bus diagnostics logs into Sentinel."", ""description"": ""This connector uses Azure Policy to apply a single Azure Service Bus log-streaming configuration to a collection of instances, defined as a scope. Follow the instructions below to create and apply a policy to all current and future instances. Note, you may already have an active policy for this resource type."", ""instructions"": [{""parameters"": {""noFxPadding"": ""1"", ""expanded"": ""1"", ""instructionSteps"": [{""title"": ""Stream diagnostics logs from your Azure Service Bus at scale"", ""innerSteps"": [{""title"": ""Launch the Azure Policy Assignment wizard and follow the steps. "", ""description"": ""> 1. In the **Basics** tab, click the button with the three dots under **Scope** to select your subscription.
2. In the **Parameters** tab, choose your Microsoft Sentinel workspace from the **Log Analytics workspace** drop-down list, and leave marked as \""True\"" all the log categories you want to ingest.
3. To apply the policy on your existing resources, mark the **Create a remediation task** check box in the **Remediation** tab."", ""instructions"": [{""parameters"": {""policyDefinitionGuid"": ""04d53d87-841c-4f23-8a5b-21564380b55e""}, ""type"": ""PolicyAssignment""}]}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Policy"", ""description"": ""owner role assigned for each policy assignment scope""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Service%20Bus/Data%20Connectors/AzureServiceBus_CCP.JSON","true" -"","Azure Storage","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Storage","azuresentinel","azure-sentinel-solution-azurestorageaccount","2022-05-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","Azure Stream Analytics","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Stream%20Analytics","azuresentinel","azure-sentinel-solution-streamanalytics","2022-06-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"AzureDiagnostics","Azure Stream Analytics","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Stream%20Analytics","azuresentinel","azure-sentinel-solution-streamanalytics","2022-06-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AzureStreamAnalytics_CCP","Microsoft","Azure Stream Analytics","Azure Stream Analytics is a real-time analytics and complex event-processing engine that is designed to analyze and process high volumes of fast 
streaming data from multiple sources simultaneously. This connector lets you stream your Azure Stream Analytics hub diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity. ","[{""title"": ""Connect your Azure Stream Analytics diagnostics logs into Sentinel."", ""description"": ""This connector uses Azure Policy to apply a single Azure Stream Analytics log-streaming configuration to a collection of instances, defined as a scope. Follow the instructions below to create and apply a policy to all current and future instances. Note, you may already have an active policy for this resource type."", ""instructions"": [{""parameters"": {""noFxPadding"": ""1"", ""expanded"": ""1"", ""instructionSteps"": [{""title"": ""Stream diagnostics logs from your Azure Stream Analytics at scale"", ""innerSteps"": [{""title"": ""Launch the Azure Policy Assignment wizard and follow the steps. "", ""description"": ""> 1. In the **Basics** tab, click the button with the three dots under **Scope** to select your subscription.
2. In the **Parameters** tab, choose your Microsoft Sentinel workspace from the **Log Analytics workspace** drop-down list, and leave marked as \""True\"" all the log categories you want to ingest.
3. To apply the policy on your existing resources, mark the **Create a remediation task** check box in the **Remediation** tab."", ""instructions"": [{""parameters"": {""policyDefinitionGuid"": ""237e0f7e-b0e8-4ec4-ad46-8c12cb66d673""}, ""type"": ""PolicyAssignment""}]}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Policy"", ""description"": ""owner role assigned for each policy assignment scope""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Stream%20Analytics/Data%20Connectors/AzureStreamAnalytics_CCP.JSON","true" -"","Azure Web Application Firewall (WAF)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Web%20Application%20Firewall%20%28WAF%29","azuresentinel","azure-sentinel-solution-azurewebapplicationfirewal","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"AzureDiagnostics","Azure Web Application Firewall (WAF)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Web%20Application%20Firewall%20%28WAF%29","azuresentinel","azure-sentinel-solution-azurewebapplicationfirewal","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","WAF","Microsoft","Azure Web Application Firewall (WAF)","Connect to the Azure Web Application Firewall (WAF) for Application Gateway, Front Door, or CDN. This WAF protects your applications from common web vulnerabilities such as SQL injection and cross-site scripting, and lets you customize rules to reduce false positives. 
Follow these instructions to stream your Microsoft Web application firewall logs into Microsoft Sentinel. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2223546&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect Azure WAF to Microsoft Sentinel"", ""description"": ""Go to each WAF resource type and choose your WAF."", ""instructions"": [{""parameters"": {""linkType"": ""OpenWaf""}, ""type"": ""InstallAgent""}, {""parameters"": {""linkType"": ""OpenFrontDoors""}, ""type"": ""InstallAgent""}, {""parameters"": {""linkType"": ""OpenCdnProfile""}, ""type"": ""InstallAgent""}]}, {""title"": """", ""description"": ""Inside your WAF resource:\n\n1. Select **Diagnostic logs.\u200b**\n2. Select **+ Add diagnostic setting.\u200b**\n3. In the **Diagnostic setting** blade:\n - Type a **Name**.\n - Select **Send to Log Analytics**.\n - Choose the log destination workspace.\u200b\n - Select the categories that you want to analyze (recommended: ApplicationGatewayAccessLog, ApplicationGatewayFirewallLog, FrontdoorAccessLog, FrontdoorWebApplicationFirewallLog, WebApplicationFirewallLogs).\u200b\n - Click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Web%20Application%20Firewall%20%28WAF%29/Data%20Connectors/template_WAF.JSON","true" -"","Azure kubernetes Service","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20kubernetes%20Service","azuresentinel","azure-sentinel-solution-azurekubernetes","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" 
-"AzureDiagnostics","Azure kubernetes Service","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20kubernetes%20Service","azuresentinel","azure-sentinel-solution-azurekubernetes","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AzureKubernetes","Microsoft","Azure Kubernetes Service (AKS)","Azure Kubernetes Service (AKS) is an open-source, fully-managed container orchestration service that allows you to deploy, scale, and manage Docker containers and container-based applications in a cluster environment. This connector lets you stream your Azure Kubernetes Service (AKS) diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity in all your instances. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219762&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect your Azure Kubernetes Service (AKS) diagnostics logs into Sentinel."", ""description"": ""This connector uses Azure Policy to apply a single Azure Kubernetes Service log-streaming configuration to a collection of instances, defined as a scope.\nFollow the instructions below to create and apply a policy to all current and future instances. **Note**, you may already have an active policy for this resource type."", ""instructions"": [{""parameters"": {""noFxPadding"": true, ""expanded"": true, ""instructionSteps"": [{""title"": ""Stream diagnostics logs from your Azure Kubernetes Service (AKS) at scale"", ""description"": null, ""innerSteps"": [{""title"": ""Launch the Azure Policy Assignment wizard and follow the steps."", ""description"": "">1. In the **Basics** tab, click the button with the three dots under **Scope** to select your resources assignment scope.\n >2. 
In the **Parameters** tab, choose your Microsoft Sentinel workspace from the **Log Analytics workspace** drop-down list, and leave marked as \""True\"" all the log and metric types you want to ingest.\n >3. To apply the policy on your existing resources, select the **Remediation tab** and mark the **Create a remediation task** checkbox."", ""instructions"": [{""parameters"": {""linkType"": ""OpenPolicyAssignment"", ""policyDefinitionGuid"": ""6c66c325-74c8-42fd-a286-a74b0e2939d8"", ""assignMode"": 1}, ""type"": ""InstallAgent""}]}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Policy"", ""description"": ""owner role assigned for each policy assignment scope.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20kubernetes%20Service/Data%20Connectors/AzureKubernetes.JSON","true" -"ContainerInventory","Azure kubernetes Service","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20kubernetes%20Service","azuresentinel","azure-sentinel-solution-azurekubernetes","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AzureKubernetes","Microsoft","Azure Kubernetes Service (AKS)","Azure Kubernetes Service (AKS) is an open-source, fully-managed container orchestration service that allows you to deploy, scale, and manage Docker containers and container-based applications in a cluster environment. This connector lets you stream your Azure Kubernetes Service (AKS) diagnostics logs into Microsoft Sentinel, allowing you to continuously monitor activity in all your instances. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219762&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect your Azure Kubernetes Service (AKS) diagnostics logs into Sentinel."", ""description"": ""This connector uses Azure Policy to apply a single Azure Kubernetes Service log-streaming configuration to a collection of instances, defined as a scope.\nFollow the instructions below to create and apply a policy to all current and future instances. **Note**, you may already have an active policy for this resource type."", ""instructions"": [{""parameters"": {""noFxPadding"": true, ""expanded"": true, ""instructionSteps"": [{""title"": ""Stream diagnostics logs from your Azure Kubernetes Service (AKS) at scale"", ""description"": null, ""innerSteps"": [{""title"": ""Launch the Azure Policy Assignment wizard and follow the steps."", ""description"": "">1. In the **Basics** tab, click the button with the three dots under **Scope** to select your resources assignment scope.\n >2. In the **Parameters** tab, choose your Microsoft Sentinel workspace from the **Log Analytics workspace** drop-down list, and leave marked as \""True\"" all the log and metric types you want to ingest.\n >3. 
To apply the policy on your existing resources, select the **Remediation tab** and mark the **Create a remediation task** checkbox."", ""instructions"": [{""parameters"": {""linkType"": ""OpenPolicyAssignment"", ""policyDefinitionGuid"": ""6c66c325-74c8-42fd-a286-a74b0e2939d8"", ""assignMode"": 1}, ""type"": ""InstallAgent""}]}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Policy"", ""description"": ""owner role assigned for each policy assignment scope.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20kubernetes%20Service/Data%20Connectors/AzureKubernetes.JSON","true" -"","AzureDevOpsAuditing","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AzureDevOpsAuditing","azuresentinel","azure-sentinel-solution-azuredevopsauditing","2022-09-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"ADOAuditLogs_CL","AzureDevOpsAuditing","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AzureDevOpsAuditing","azuresentinel","azure-sentinel-solution-azuredevopsauditing","2022-09-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureDevOpsAuditLogs","Microsoft","Azure DevOps Audit Logs (via Codeless Connector Platform)","The Azure DevOps Audit Logs data connector allows you to ingest audit events from Azure DevOps into Microsoft Sentinel. This data connector is built using the Microsoft Sentinel Codeless Connector Platform, ensuring seamless integration. 
It leverages the Azure DevOps Audit Logs API to fetch detailed audit events and supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview). These transformations enable parsing of the received audit data into a custom table during ingestion, improving query performance by eliminating the need for additional parsing. By using this connector, you can gain enhanced visibility into your Azure DevOps environment and streamline your security operations.","[{""title"": ""Connect to Azure DevOps to start collecting Audit logs in Microsoft Sentinel.\n"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""\n1. Enter the App you have registered.\n 2. In the 'Overview' section, copy the Application (client) ID.\n 3. Select the 'Endpoints' button, and copy the 'OAuth 2.0 authorization endpoint (v2)' value and the 'OAuth 2.0 token endpoint (v2)' value.\n 4. In the 'Certificates & secrets' section, copy the 'Client Secret value', and store it securely.\n5. 
Provide the required information below and click 'Connect'.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Token Endpoint"", ""name"": ""tokenEndpoint"", ""placeholder"": ""https://login.microsoftonline.com/{TenantId}/oauth2/v2.0/token"", ""type"": ""text"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Authorization Endpoint"", ""name"": ""authorizationEndpoint"", ""placeholder"": ""https://login.microsoftonline.com/{TenantId}/oauth2/v2.0/authorize"", ""type"": ""text"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Endpoint"", ""name"": ""apiEndpoint"", ""placeholder"": ""https://auditservice.dev.azure.com/{organizationName}/_apis/audit/auditlog?api-version=7.2-preview"", ""type"": ""text"", ""validations"": {""required"": true}}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""App Client ID"", ""clientSecretLabel"": ""App Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure DevOps Prerequisite"", ""description"": ""Please ensure the following:
1. Register an Entra App in Microsoft Entra Admin Center under App Registrations.
2. In 'API permissions' - add Permissions to 'Azure DevOps - vso.auditlog'.
3. In 'Certificates & secrets' - generate 'Client secret'.
4. In 'Authentication' - add Redirect URI: 'https://portal.azure.com/TokenAuthorize/ExtensionName/Microsoft_Azure_Security_Insights'.
5. In the Azure DevOps settings - enable audit log and set **View audit log** for the user. [Azure DevOps Auditing](https://learn.microsoft.com/en-us/azure/devops/organizations/audit/azure-devops-auditing?view=azure-devops&tabs=preview-page).
6. Ensure the user assigned to connect the data connector has the View audit logs permission explicitly set to Allow at all times. This permission is essential for successful log ingestion. If the permission is revoked or not granted, data ingestion will fail or be interrupted.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AzureDevOpsAuditing/Data%20Connectors/AzureDevOpsAuditLogs_CCP/AzureDevOpsAuditLogs_DataConnectorDefinition.json","true" -"","AzureSecurityBenchmark","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AzureSecurityBenchmark","azuresentinel","azure-sentinel-solution-azuresecuritybenchmark","2022-06-17","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","BETTER Mobile Threat Defense (MTD)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BETTER%20Mobile%20Threat%20Defense%20%28MTD%29","bettermobilesecurityinc","better_mtd_mss","2022-05-02","","","Better Mobile Security Inc.","Partner","https://www.better.mobi/about#contact-us","","domains","","","","","","","false","","false" -"BetterMTDAppLog_CL","BETTER Mobile Threat Defense (MTD)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BETTER%20Mobile%20Threat%20Defense%20%28MTD%29","bettermobilesecurityinc","better_mtd_mss","2022-05-02","","","Better Mobile Security Inc.","Partner","https://www.better.mobi/about#contact-us","","domains","BetterMTD","BETTER Mobile","BETTER Mobile Threat Defense (MTD)","The BETTER MTD Connector allows Enterprises to connect their Better MTD instances with Microsoft Sentinel, to view their data in Dashboards, create custom alerts, use it to trigger playbooks and expands threat hunting capabilities. This gives users more insight into their organization's mobile devices and ability to quickly analyze current mobile security posture which improves their overall SecOps capabilities.","[{""title"": """", ""description"": ""1. 
In **Better MTD Console**, click on **Integration** on the side bar.\n2. Select **Others** tab.\n3. Click the **ADD ACCOUNT** button and Select **Microsoft Sentinel** from the available integrations.\n4. Create the Integration:\n - set `ACCOUNT NAME` to a descriptive name that identifies the integration then click **Next**\n - Enter your `WORKSPACE ID` and `PRIMARY KEY` from the fields below, click **Save**\n - Click **Done**\n5. Threat Policy setup (Which Incidents should be reported to `Microsoft Sentinel`):\n - In **Better MTD Console**, click on **Policies** on the side bar\n - Click on the **Edit** button of the Policy that you are using.\n - For each Incident types that you want to be logged go to **Send to Integrations** field and select **Sentinel**\n6. For additional information, please refer to our [Documentation](https://mtd-docs.bmobi.net/integrations/how-to-setup-azure-sentinel-integration#mtd-integration-configuration)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BETTER%20Mobile%20Threat%20Defense%20%28MTD%29/Data%20Connectors/BETTERMTD.json","true" -"BetterMTDDeviceLog_CL","BETTER Mobile Threat Defense (MTD)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BETTER%20Mobile%20Threat%20Defense%20%28MTD%29","bettermobilesecurityinc","better_mtd_mss","2022-05-02","","","Better Mobile Security Inc.","Partner","https://www.better.mobi/about#contact-us","","domains","BetterMTD","BETTER Mobile","BETTER Mobile Threat Defense (MTD)","The BETTER MTD Connector allows Enterprises to connect their Better MTD instances with Microsoft Sentinel, to view their data in Dashboards, create custom alerts, use it to trigger playbooks and expands threat hunting capabilities. This gives users more insight into their organization's mobile devices and ability to quickly analyze current mobile security posture which improves their overall SecOps capabilities.","[{""title"": """", ""description"": ""1. In **Better MTD Console**, click on **Integration** on the side bar.\n2. Select **Others** tab.\n3. Click the **ADD ACCOUNT** button and Select **Microsoft Sentinel** from the available integrations.\n4. Create the Integration:\n - set `ACCOUNT NAME` to a descriptive name that identifies the integration then click **Next**\n - Enter your `WORKSPACE ID` and `PRIMARY KEY` from the fields below, click **Save**\n - Click **Done**\n5. 
Threat Policy setup (Which Incidents should be reported to `Microsoft Sentinel`):\n - In **Better MTD Console**, click on **Policies** on the side bar\n - Click on the **Edit** button of the Policy that you are using.\n - For each Incident types that you want to be logged go to **Send to Integrations** field and select **Sentinel**\n6. For additional information, please refer to our [Documentation](https://mtd-docs.bmobi.net/integrations/how-to-setup-azure-sentinel-integration#mtd-integration-configuration)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BETTER%20Mobile%20Threat%20Defense%20%28MTD%29/Data%20Connectors/BETTERMTD.json","true" -"BetterMTDIncidentLog_CL","BETTER Mobile Threat Defense (MTD)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BETTER%20Mobile%20Threat%20Defense%20%28MTD%29","bettermobilesecurityinc","better_mtd_mss","2022-05-02","","","Better Mobile Security Inc.","Partner","https://www.better.mobi/about#contact-us","","domains","BetterMTD","BETTER Mobile","BETTER Mobile Threat Defense (MTD)","The BETTER MTD Connector allows Enterprises to connect their Better MTD instances with Microsoft Sentinel, to view their data in Dashboards, create custom alerts, use it to trigger playbooks and expands threat hunting capabilities. This gives users more insight into their organization's mobile devices and ability to quickly analyze current mobile security posture which improves their overall SecOps capabilities.","[{""title"": """", ""description"": ""1. In **Better MTD Console**, click on **Integration** on the side bar.\n2. Select **Others** tab.\n3. Click the **ADD ACCOUNT** button and Select **Microsoft Sentinel** from the available integrations.\n4. Create the Integration:\n - set `ACCOUNT NAME` to a descriptive name that identifies the integration then click **Next**\n - Enter your `WORKSPACE ID` and `PRIMARY KEY` from the fields below, click **Save**\n - Click **Done**\n5. 
Threat Policy setup (Which Incidents should be reported to `Microsoft Sentinel`):\n - In **Better MTD Console**, click on **Policies** on the side bar\n - Click on the **Edit** button of the Policy that you are using.\n - For each Incident types that you want to be logged go to **Send to Integrations** field and select **Sentinel**\n6. For additional information, please refer to our [Documentation](https://mtd-docs.bmobi.net/integrations/how-to-setup-azure-sentinel-integration#mtd-integration-configuration)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BETTER%20Mobile%20Threat%20Defense%20%28MTD%29/Data%20Connectors/BETTERMTD.json","true" -"BetterMTDNetflowLog_CL","BETTER Mobile Threat Defense (MTD)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BETTER%20Mobile%20Threat%20Defense%20%28MTD%29","bettermobilesecurityinc","better_mtd_mss","2022-05-02","","","Better Mobile Security Inc.","Partner","https://www.better.mobi/about#contact-us","","domains","BetterMTD","BETTER Mobile","BETTER Mobile Threat Defense (MTD)","The BETTER MTD Connector allows Enterprises to connect their Better MTD instances with Microsoft Sentinel, to view their data in Dashboards, create custom alerts, use it to trigger playbooks and expands threat hunting capabilities. This gives users more insight into their organization's mobile devices and ability to quickly analyze current mobile security posture which improves their overall SecOps capabilities.","[{""title"": """", ""description"": ""1. In **Better MTD Console**, click on **Integration** on the side bar.\n2. Select **Others** tab.\n3. Click the **ADD ACCOUNT** button and Select **Microsoft Sentinel** from the available integrations.\n4. Create the Integration:\n - set `ACCOUNT NAME` to a descriptive name that identifies the integration then click **Next**\n - Enter your `WORKSPACE ID` and `PRIMARY KEY` from the fields below, click **Save**\n - Click **Done**\n5. 
Threat Policy setup (Which Incidents should be reported to `Microsoft Sentinel`):\n - In **Better MTD Console**, click on **Policies** on the side bar\n - Click on the **Edit** button of the Policy that you are using.\n - For each Incident types that you want to be logged go to **Send to Integrations** field and select **Sentinel**\n6. For additional information, please refer to our [Documentation](https://mtd-docs.bmobi.net/integrations/how-to-setup-azure-sentinel-integration#mtd-integration-configuration)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BETTER%20Mobile%20Threat%20Defense%20%28MTD%29/Data%20Connectors/BETTERMTD.json","true" -"","Barracuda CloudGen Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Barracuda%20CloudGen%20Firewall","microsoftsentinelcommunity","azure-sentinel-solution-barracudacloudgenfirewall","2021-05-02","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","","","","","","","false","","false" -"Syslog","Barracuda CloudGen Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Barracuda%20CloudGen%20Firewall","microsoftsentinelcommunity","azure-sentinel-solution-barracudacloudgenfirewall","2021-05-02","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","BarracudaCloudFirewall","Barracuda","[Deprecated] Barracuda CloudGen Firewall","The Barracuda CloudGen Firewall (CGFW) connector allows you to easily connect your Barracuda CGFW logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias CGFWFirewallActivity and load the function code or click [here](https://aka.ms/sentinel-barracudacloudfirewall-parser). 
The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n2. Select **Apply below configuration to my machines** and select the facilities and severities.\n3. Click **Save**.""}, {""title"": ""Configure and connect the Barracuda CloudGen Firewall"", ""description"": ""[Follow instructions](https://aka.ms/sentinel-barracudacloudfirewall-connector) to configure syslog streaming. 
Use the IP address or hostname for the Linux machine with the Microsoft Sentinel agent installed for the Destination IP address."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Barracuda CloudGen Firewall"", ""description"": ""must be configured to export logs via Syslog""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Barracuda%20CloudGen%20Firewall/Data%20Connectors/template_BarracudaCloudFirewall.json","true" -"","Barracuda WAF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Barracuda%20WAF","barracudanetworks","barracuda_web_application_firewall_mss","2022-05-13","","","Barracuda","Partner","https://www.barracuda.com/support","","domains","","","","","","","false","","false" -"Barracuda_CL","Barracuda WAF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Barracuda%20WAF","barracudanetworks","barracuda_web_application_firewall_mss","2022-05-13","","","Barracuda","Partner","https://www.barracuda.com/support","","domains","Barracuda","Barracuda","[Deprecated] Barracuda Web Application Firewall via Legacy Agent","The Barracuda Web Application Firewall (WAF) connector allows you to easily connect your Barracuda logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization’s network and improves your security operation capabilities.

[For more information >​](https://aka.ms/CEF-Barracuda)","[{""title"": ""Configure and connect Barracuda WAF"", ""description"": ""The Barracuda Web Application Firewall can integrate with and export logs directly to Microsoft Sentinel via Azure OMS Server.\u200b\n\n1. Go to [Barracuda WAF configuration](https://aka.ms/asi-barracuda-connector), and follow the instructions, using the parameters below to set up the connection:.\n\n2. Web Firewall logs facility: Go to the advanced settings (link below) for your workspace and on the **Data > Syslog** tabs, make sure that the facility exists.\u200b\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}, {""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Barracuda%20WAF/Data%20Connectors/template_Barracuda.json","true" -"CommonSecurityLog","Barracuda WAF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Barracuda%20WAF","barracudanetworks","barracuda_web_application_firewall_mss","2022-05-13","","","Barracuda","Partner","https://www.barracuda.com/support","","domains","Barracuda","Barracuda","[Deprecated] Barracuda Web Application Firewall via Legacy Agent","The Barracuda Web Application Firewall (WAF) connector allows you to easily connect your Barracuda logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization’s network and improves your security operation capabilities.

[For more information >​](https://aka.ms/CEF-Barracuda)","[{""title"": ""Configure and connect Barracuda WAF"", ""description"": ""The Barracuda Web Application Firewall can integrate with and export logs directly to Microsoft Sentinel via Azure OMS Server.\u200b\n\n1. Go to [Barracuda WAF configuration](https://aka.ms/asi-barracuda-connector), and follow the instructions, using the parameters below to set up the connection:.\n\n2. Web Firewall logs facility: Go to the advanced settings (link below) for your workspace and on the **Data > Syslog** tabs, make sure that the facility exists.\u200b\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}, {""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Barracuda%20WAF/Data%20Connectors/template_Barracuda.json","true" -"barracuda_CL","Barracuda WAF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Barracuda%20WAF","barracudanetworks","barracuda_web_application_firewall_mss","2022-05-13","","","Barracuda","Partner","https://www.barracuda.com/support","","domains","Barracuda","Barracuda","[Deprecated] Barracuda Web Application Firewall via Legacy Agent","The Barracuda Web Application Firewall (WAF) connector allows you to easily connect your Barracuda logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization’s network and improves your security operation capabilities.

[For more information >​](https://aka.ms/CEF-Barracuda)","[{""title"": ""Configure and connect Barracuda WAF"", ""description"": ""The Barracuda Web Application Firewall can integrate with and export logs directly to Microsoft Sentinel via Azure OMS Server.\u200b\n\n1. Go to [Barracuda WAF configuration](https://aka.ms/asi-barracuda-connector), and follow the instructions, using the parameters below to set up the connection:.\n\n2. Web Firewall logs facility: Go to the advanced settings (link below) for your workspace and on the **Data > Syslog** tabs, make sure that the facility exists.\u200b\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}, {""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Barracuda%20WAF/Data%20Connectors/template_Barracuda.json","true" -"","Beyond Security beSECURE","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Beyond%20Security%20beSECURE","azuresentinel","azure-sentinel-solution-isvtesting12","2022-05-02","","","Beyond Security","Partner","https://beyondsecurity.freshdesk.com/support/home","","domains","","","","","","","false","","false" -"beSECURE_Audit_CL","Beyond Security beSECURE","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Beyond%20Security%20beSECURE","azuresentinel","azure-sentinel-solution-isvtesting12","2022-05-02","","","Beyond Security","Partner","https://beyondsecurity.freshdesk.com/support/home","","domains","BeyondSecuritybeSECURE","Beyond Security","Beyond Security beSECURE","The [Beyond Security beSECURE](https://beyondsecurity.com/) connector allows you to easily connect your Beyond Security beSECURE scan events, scan results and audit trail with Azure Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": ""Configure beSECURE"", ""description"": ""Follow the steps below to configure your beSECURE solution to send out scan results, scan status and audit trail to Azure Sentinel."", ""innerSteps"": [{""title"": ""1. Access the Integration menu"", ""description"": ""1.1 Click on the 'More' menu option\n\n1.2 Select Server\n\n1.3 Select Integration\n\n1.4 Enable Azure Sentinel""}, {""title"": ""2. 
Provide Azure Sentinel settings"", ""description"": ""Fill in the Workspace ID and Primary Key values, click 'Modify'"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Beyond%20Security%20beSECURE/Data%20Connectors/Beyond%20Security%20beSECURE.json","true" -"beSECURE_ScanEvent_CL","Beyond Security beSECURE","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Beyond%20Security%20beSECURE","azuresentinel","azure-sentinel-solution-isvtesting12","2022-05-02","","","Beyond Security","Partner","https://beyondsecurity.freshdesk.com/support/home","","domains","BeyondSecuritybeSECURE","Beyond Security","Beyond Security beSECURE","The [Beyond Security beSECURE](https://beyondsecurity.com/) connector allows you to easily connect your Beyond Security beSECURE scan events, scan results and audit trail with Azure Sentinel, to view dashboards, create custom alerts, and improve investigation. 
This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": ""Configure beSECURE"", ""description"": ""Follow the steps below to configure your beSECURE solution to send out scan results, scan status and audit trail to Azure Sentinel."", ""innerSteps"": [{""title"": ""1. Access the Integration menu"", ""description"": ""1.1 Click on the 'More' menu option\n\n1.2 Select Server\n\n1.3 Select Integration\n\n1.4 Enable Azure Sentinel""}, {""title"": ""2. Provide Azure Sentinel settings"", ""description"": ""Fill in the Workspace ID and Primary Key values, click 'Modify'"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Beyond%20Security%20beSECURE/Data%20Connectors/Beyond%20Security%20beSECURE.json","true" -"beSECURE_ScanResults_CL","Beyond Security beSECURE","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Beyond%20Security%20beSECURE","azuresentinel","azure-sentinel-solution-isvtesting12","2022-05-02","","","Beyond Security","Partner","https://beyondsecurity.freshdesk.com/support/home","","domains","BeyondSecuritybeSECURE","Beyond Security","Beyond Security beSECURE","The [Beyond Security beSECURE](https://beyondsecurity.com/) connector allows you to easily connect your Beyond Security beSECURE scan events, scan results and audit trail with Azure Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": ""Configure beSECURE"", ""description"": ""Follow the steps below to configure your beSECURE solution to send out scan results, scan status and audit trail to Azure Sentinel."", ""innerSteps"": [{""title"": ""1. Access the Integration menu"", ""description"": ""1.1 Click on the 'More' menu option\n\n1.2 Select Server\n\n1.3 Select Integration\n\n1.4 Enable Azure Sentinel""}, {""title"": ""2. 
Provide Azure Sentinel settings"", ""description"": ""Fill in the Workspace ID and Primary Key values, click 'Modify'"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Beyond%20Security%20beSECURE/Data%20Connectors/Beyond%20Security%20beSECURE.json","true" -"","BigID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BigID","bigid","azure-sentinel-solution-bigid","2025-10-07","","","BigID","Partner","https://www.bigid.com/support","","domains","","","","","","","false","","false" -"BigIDDSPMCatalog_CL","BigID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BigID","bigid","azure-sentinel-solution-bigid","2025-10-07","","","BigID","Partner","https://www.bigid.com/support","","domains","BigIDDSPMLogsConnectorDefinition","BigID","BigID DSPM connector","The [BigID DSPM](https://bigid.com/data-security-posture-management/) data connector provides the capability to ingest BigID DSPM cases with affected objects and datasource information into Microsoft 
Sentinel.","[{""description"": ""Provide your BigID domain name like 'customer.bigid.cloud' and your BigID token. Generate a token in the BigID console via Settings -> Access Management -> Users -> Select User and generate a token."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""BigID FQDN"", ""placeholder"": ""BigID FQDN"", ""type"": ""text"", ""name"": ""bigidFqdn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""BigID Token"", ""placeholder"": ""BigID Token"", ""type"": ""password"", ""name"": ""bigidToken"", ""validations"": {""required"": true}}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Connect to BigID DSPM API to start collecting BigID DSPM cases and affected Objects in Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""BigID DSPM API access"", ""description"": ""Access to the BigID DSPM API through a BigID Token is required.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BigID/Data%20Connectors/BigIDDSPMLogs_ccp/BigIDDSPMLogs_connectorDefinition.json","true" -"","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","","","","","","","false","","false" -"BitsightAlerts_data_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight 
Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the BitSight API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create/Get Bitsight API Token**\n\n Follow these instructions to get a BitSight API Token.\n 1. For SPM App: Refer to the [User Preference](https://service.bitsight.com/app/spm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 2. For TPRM App: Refer to the [User Preference](https://service.bitsight.com/app/tprm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 3. For Classic BitSight: Go to your [Account](https://service.bitsight.com/settings) page, \n\t\tGo to Settings > Account > API Token.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of BitSight Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of BitSight Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the BitSight data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the BitSight API Token."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the BitSight connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BitSight-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. \n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. 
**Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. 
Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. Once all application settings have been entered, click **Review + create** to deploy..""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the BitSight data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-BitSight310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitSightXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. 
\n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. 
Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""BitSight API Token is required. 
See the documentation to [learn more](https://help.bitsighttech.com/hc/en-us/articles/115014888388-API-Token-Management) about API Token.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" -"BitsightBreaches_data_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the BitSight API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create/Get Bitsight API Token**\n\n Follow these instructions to get a BitSight API Token.\n 1. For SPM App: Refer to the [User Preference](https://service.bitsight.com/app/spm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 2. 
For TPRM App: Refer to the [User Preference](https://service.bitsight.com/app/tprm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 3. For Classic BitSight: Go to your [Account](https://service.bitsight.com/settings) page, \n\t\tGo to Settings > Account > API Token.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of BitSight Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. 
*Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the BitSight data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the BitSight API Token."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the BitSight connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BitSight-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. \n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. 
**Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. 
Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. Once all application settings have been entered, click **Review + create** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the BitSight data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-BitSight310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitSightXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. 
\n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. 
Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""BitSight API Token is required. 
See the documentation to [learn more](https://help.bitsighttech.com/hc/en-us/articles/115014888388-API-Token-Management) about API Token.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" -"BitsightCompany_details_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the BitSight API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create/Get Bitsight API Token**\n\n Follow these instructions to get a BitSight API Token.\n 1. For SPM App: Refer to the [User Preference](https://service.bitsight.com/app/spm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 2. 
For TPRM App: Refer to the [User Preference](https://service.bitsight.com/app/tprm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 3. For Classic BitSight: Go to your [Account](https://service.bitsight.com/settings) page, \n\t\tGo to Settings > Account > API Token.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of BitSight Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. 
*Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the BitSight data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the BitSight API Token."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the BitSight connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BitSight-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. \n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. 
**Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. 
Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. Once all application settings have been entered, click **Review + create** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the BitSight data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-BitSight310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitSightXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. 
\n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. 
Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""BitSight API Token is required. 
See the documentation to [learn more](https://help.bitsighttech.com/hc/en-us/articles/115014888388-API-Token-Management) about API Token.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" -"BitsightCompany_rating_details_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the BitSight API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create/Get Bitsight API Token**\n\n Follow these instructions to get a BitSight API Token.\n 1. For SPM App: Refer to the [User Preference](https://service.bitsight.com/app/spm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 2. 
For TPRM App: Refer to the [User Preference](https://service.bitsight.com/app/tprm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 3. For Classic BitSight: Go to your [Account](https://service.bitsight.com/settings) page, \n\t\tGo to Settings > Account > API Token.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of BitSight Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. 
*Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the BitSight data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the BitSight API Token."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the BitSight connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BitSight-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. \n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. 
**Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. 
Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. Once all application settings have been entered, click **Review + create** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the BitSight data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-BitSight310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitSightXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. 
\n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. 
Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""BitSight API Token is required. 
See the documentation to [learn more](https://help.bitsighttech.com/hc/en-us/articles/115014888388-API-Token-Management) about API Token.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" -"BitsightDiligence_historical_statistics_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the BitSight API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create/Get Bitsight API Token**\n\n Follow these instructions to get a BitSight API Token.\n 1. For SPM App: Refer to the [User Preference](https://service.bitsight.com/app/spm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 2. 
For TPRM App: Refer to the [User Preference](https://service.bitsight.com/app/tprm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 3. For Classic BitSight: Go to your [Account](https://service.bitsight.com/settings) page, \n\t\tGo to Settings > Account > API Token.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of BitSight Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. 
*Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the BitSight data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the BitSight API Token."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the BitSight connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BitSight-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. \n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. 
**Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. 
Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. Once all application settings have been entered, click **Review + create** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the BitSight data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-BitSight310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitSightXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. 
\n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. 
Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""BitSight API Token is required. 
See the documentation to [learn more](https://help.bitsighttech.com/hc/en-us/articles/115014888388-API-Token-Management) about API Token.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" -"BitsightDiligence_statistics_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the BitSight API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create/Get Bitsight API Token**\n\n Follow these instructions to get a BitSight API Token.\n 1. For SPM App: Refer to the [User Preference](https://service.bitsight.com/app/spm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 2. 
For TPRM App: Refer to the [User Preference](https://service.bitsight.com/app/tprm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 3. For Classic BitSight: Go to your [Account](https://service.bitsight.com/settings) page, \n\t\tGo to Settings > Account > API Token.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of BitSight Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. 
*Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the BitSight data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the BitSight API Token."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the BitSight connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BitSight-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. \n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. 
**Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. 
Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. Once all application settings have been entered, click **Review + create** to deploy..""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the BitSight data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-BitSight310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitSightXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. 
\n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. 
Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""BitSight API Token is required. 
See the documentation to [learn more](https://help.bitsighttech.com/hc/en-us/articles/115014888388-API-Token-Management) about API Token.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" -"BitsightFindings_data_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the BitSight API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create/Get Bitsight API Token**\n\n Follow these instructions to get a BitSight API Token.\n 1. For SPM App: Refer to the [User Preference](https://service.bitsight.com/app/spm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 2. 
For TPRM App: Refer to the [User Preference](https://service.bitsight.com/app/tprm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 3. For Classic BitSight: Go to your [Account](https://service.bitsight.com/settings) page, \n\t\tGo to Settings > Account > API Token.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of BitSight Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. 
*Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the BitSight data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the BitSight API Token."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the BitSight connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BitSight-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. \n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. 
**Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. 
Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. Once all application settings have been entered, click **Review + create** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the BitSight data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-BitSight310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitSightXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. 
\n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. 
Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""BitSight API Token is required. 
See the documentation to [learn more](https://help.bitsighttech.com/hc/en-us/articles/115014888388-API-Token-Management) about API Token.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" -"BitsightFindings_summary_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the BitSight API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create/Get Bitsight API Token**\n\n Follow these instructions to get a BitSight API Token.\n 1. For SPM App: Refer to the [User Preference](https://service.bitsight.com/app/spm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 2. 
For TPRM App: Refer to the [User Preference](https://service.bitsight.com/app/tprm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 3. For Classic BitSight: Go to your [Account](https://service.bitsight.com/settings) page, \n\t\tGo to Settings > Account > API Token.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of BitSight Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. 
*Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the BitSight data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the BitSight API Token."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the BitSight connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BitSight-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. \n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. 
**Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. 
Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. Once all application settings have been entered, click **Review + create** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the BitSight data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-BitSight310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitSightXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. 
\n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. 
Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""BitSight API Token is required. 
See the documentation to [learn more](https://help.bitsighttech.com/hc/en-us/articles/115014888388-API-Token-Management) about API Token.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" -"BitsightGraph_data_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the BitSight API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create/Get Bitsight API Token**\n\n Follow these instructions to get a BitSight API Token.\n 1. For SPM App: Refer to the [User Preference](https://service.bitsight.com/app/spm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 2. 
For TPRM App: Refer to the [User Preference](https://service.bitsight.com/app/tprm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 3. For Classic BitSight: Go to your [Account](https://service.bitsight.com/settings) page, \n\t\tGo to Settings > Account > API Token.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of BitSight Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. 
*Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the BitSight data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the BitSight API Token."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the BitSight connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BitSight-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. \n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. 
**Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. 
Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. Once all application settings have been entered, click **Review + create** to deploy..""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the BitSight data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-BitSight310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitSightXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. 
\n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. 
Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""BitSight API Token is required. 
See the documentation to [learn more](https://help.bitsighttech.com/hc/en-us/articles/115014888388-API-Token-Management) about API Token.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" -"BitsightIndustrial_statistics_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the BitSight API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create/Get Bitsight API Token**\n\n Follow these instructions to get a BitSight API Token.\n 1. For SPM App: Refer to the [User Preference](https://service.bitsight.com/app/spm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 2. 
For TPRM App: Refer to the [User Preference](https://service.bitsight.com/app/tprm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 3. For Classic BitSight: Go to your [Account](https://service.bitsight.com/settings) page, \n\t\tGo to Settings > Account > API Token.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of BitSight Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. 
*Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the BitSight data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the BitSight API Token."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the BitSight connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BitSight-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. \n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. 
**Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. 
Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. Once all application settings have been entered, click **Review + create** to deploy..""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the BitSight data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-BitSight310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitSightXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. 
\n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. 
Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""BitSight API Token is required. 
See the documentation to [learn more](https://help.bitsighttech.com/hc/en-us/articles/115014888388-API-Token-Management) about API Token.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" -"BitsightObservation_statistics_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the BitSight API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create/Get Bitsight API Token**\n\n Follow these instructions to get a BitSight API Token.\n 1. For SPM App: Refer to the [User Preference](https://service.bitsight.com/app/spm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 2. 
For TPRM App: Refer to the [User Preference](https://service.bitsight.com/app/tprm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 3. For Classic BitSight: Go to your [Account](https://service.bitsight.com/settings) page, \n\t\tGo to Settings > Account > API Token.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of BitSight Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. 
*Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the BitSight data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the BitSight API Token."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the BitSight connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BitSight-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. \n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. 
**Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. 
Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. Once all application settings have been entered, click **Review + create** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the BitSight data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-BitSight310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitSightXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. 
\n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. 
Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""BitSight API Token is required. 
See the documentation to [learn more](https://help.bitsighttech.com/hc/en-us/articles/115014888388-API-Token-Management) about API Token.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" -"","Bitglass","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Bitglass","azuresentinel","azure-sentinel-solution-bitglass","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"BitglassLogs_CL","Bitglass","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Bitglass","azuresentinel","azure-sentinel-solution-bitglass","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Bitglass","Bitglass","Bitglass","The [Bitglass](https://www.bitglass.com/) data connector provides the capability to retrieve security event logs of the Bitglass services and more events into Microsoft Sentinel through the REST API. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Azure Blob Storage API to pull logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Bitglass**](https://aka.ms/sentinel-bitglass-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Bitglass Log Retrieval API**\n\n Follow the instructions to obtain the credentials.\n\n1. Please contact Bitglass [support](https://pages.bitglass.com/Contact.html) and obtain the **BitglassToken** and **BitglassServiceURL**.\n2. Save credentials for using in the data connector.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Bitglass data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Bitglass data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-bitglass-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **BitglassToken**, **BitglassServiceURL** and deploy. 
\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Bitglass data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-bitglass-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitglassXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tBitglassToken\n\t\tBitglassServiceURL\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**BitglassToken** and **BitglassServiceURL** are required for making API calls.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Bitglass/Data%20Connectors/Bitglass_API_FunctionApp.json","true" -"","Bitwarden","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Bitwarden","8bit-solutions-llc","bitwarden-sentinel-integration","2024-05-12","2024-10-02","","Bitwarden Inc","Partner","https://bitwarden.com","","domains","","","","","","","false","","false" -"BitwardenEventLogs_CL","Bitwarden","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Bitwarden","8bit-solutions-llc","bitwarden-sentinel-integration","2024-05-12","2024-10-02","","Bitwarden Inc","Partner","https://bitwarden.com","","domains","BitwardenEventLogs","Bitwarden Inc","Bitwarden Event Logs","This connector provides insight into activity of your Bitwarden organization such as user's activity (logged in, changed password, 2fa, etc.), cipher activity (created, updated, deleted, shared, etc.), collection activity, organization activity, and more.","[{""description"": ""Your API key can be found in the Bitwarden organization admin console.\nPlease see [Bitwarden documentation](https://bitwarden.com/help/public-api/#authentication) for more information.\nSelf-hosted Bitwarden servers may need to reconfigure their installation's URL."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Bitwarden Identity Url"", ""placeholder"": ""https://identity.bitwarden.com"", ""type"": ""text"", ""name"": ""identityEndpoint""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Bitwarden Api Url"", ""placeholder"": ""https://api.bitwarden.com"", ""type"": ""text"", ""name"": ""apiEndpoint""}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client 
ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}], ""title"": ""Connect Bitwarden Event Logs to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Bitwarden Client Id and Client Secret"", ""description"": ""Your API key can be found in the Bitwarden organization admin console. 
Please see [Bitwarden documentation](https://bitwarden.com/help/public-api/#authentication) for more information.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Bitwarden/Data%20Connectors/BitwardenEventLogs/definitions.json","true" -"BitwardenGroups_CL","Bitwarden","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Bitwarden","8bit-solutions-llc","bitwarden-sentinel-integration","2024-05-12","2024-10-02","","Bitwarden Inc","Partner","https://bitwarden.com","","domains","BitwardenEventLogs","Bitwarden Inc","Bitwarden Event Logs","This connector provides insight into activity of your Bitwarden organization such as user's activity (logged in, changed password, 2fa, etc.), cipher activity (created, updated, deleted, shared, etc.), collection activity, organization activity, and more.","[{""description"": ""Your API key can be found in the Bitwarden organization admin console.\nPlease see [Bitwarden documentation](https://bitwarden.com/help/public-api/#authentication) for more information.\nSelf-hosted Bitwarden servers may need to reconfigure their installation's URL."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Bitwarden Identity Url"", ""placeholder"": ""https://identity.bitwarden.com"", ""type"": ""text"", ""name"": ""identityEndpoint""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Bitwarden Api Url"", ""placeholder"": ""https://api.bitwarden.com"", ""type"": ""text"", ""name"": ""apiEndpoint""}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}], ""title"": ""Connect Bitwarden Event Logs to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": 
""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Bitwarden Client Id and Client Secret"", ""description"": ""Your API key can be found in the Bitwarden organization admin console. Please see [Bitwarden documentation](https://bitwarden.com/help/public-api/#authentication) for more information.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Bitwarden/Data%20Connectors/BitwardenEventLogs/definitions.json","true" -"BitwardenMembers_CL","Bitwarden","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Bitwarden","8bit-solutions-llc","bitwarden-sentinel-integration","2024-05-12","2024-10-02","","Bitwarden Inc","Partner","https://bitwarden.com","","domains","BitwardenEventLogs","Bitwarden Inc","Bitwarden Event Logs","This connector provides insight into activity of your Bitwarden organization such as user's activity (logged in, changed password, 2fa, etc.), cipher activity (created, updated, deleted, shared, etc.), collection activity, organization activity, and more.","[{""description"": ""Your API key can be found in the Bitwarden organization admin console.\nPlease see [Bitwarden documentation](https://bitwarden.com/help/public-api/#authentication) for more information.\nSelf-hosted Bitwarden servers may need to reconfigure their installation's URL."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Bitwarden Identity Url"", ""placeholder"": ""https://identity.bitwarden.com"", ""type"": ""text"", ""name"": 
""identityEndpoint""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Bitwarden Api Url"", ""placeholder"": ""https://api.bitwarden.com"", ""type"": ""text"", ""name"": ""apiEndpoint""}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}], ""title"": ""Connect Bitwarden Event Logs to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Bitwarden Client Id and Client Secret"", ""description"": ""Your API key can be found in the Bitwarden organization admin console. 
Please see [Bitwarden documentation](https://bitwarden.com/help/public-api/#authentication) for more information.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Bitwarden/Data%20Connectors/BitwardenEventLogs/definitions.json","true" -"","Blackberry CylancePROTECT","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Blackberry%20CylancePROTECT","azuresentinel","azure-sentinel-solution-blackberrycylanceprotect","2022-05-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"Syslog","Blackberry CylancePROTECT","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Blackberry%20CylancePROTECT","azuresentinel","azure-sentinel-solution-blackberrycylanceprotect","2022-05-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","BlackberryCylancePROTECT","Blackberry","[Deprecated] Blackberry CylancePROTECT","The [Blackberry CylancePROTECT](https://www.blackberry.com/us/en/products/blackberry-protect) connector allows you to easily connect your CylancePROTECT logs with Microsoft Sentinel. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias CyclanePROTECT and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Blackberry%20CylancePROTECT/Parsers/CylancePROTECT.txt), on the second line of the query, enter the hostname(s) of your CyclanePROTECT device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n\n1. Select the link below to open your workspace **agents configuration**, and select the **Syslog** tab.\n2. Select **Add facility** and choose from the drop-down list of facilities. Repeat for all the facilities you want to add.\n3. Mark the check boxes for the desired severities for each facility.\n4. Click **Apply**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Configure and connect the CylancePROTECT"", ""description"": ""[Follow these instructions](https://docs.blackberry.com/content/dam/docs-blackberry-com/release-pdfs/en/cylance-products/syslog-guides/Cylance%20Syslog%20Guide%20v2.0%20rev12.pdf) to configure the CylancePROTECT to forward syslog. 
Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""CylancePROTECT"", ""description"": ""must be configured to export logs via Syslog.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Blackberry%20CylancePROTECT/Data%20Connectors/template_BlackberryCylancePROTECT.JSON","true" -"","BlinkOps","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BlinkOps","blinkoperations1709924858838","azure-sentinel-blink_automation","2025-05-05","","","Blink Support","Partner","https://support.blinkops.com","","domains","","","","","","","false","","false" -"","BloodHound Enterprise","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BloodHound%20Enterprise","azurehoundenterprise","bloodhoundenterprise-azuresentinel","2023-05-04","2021-05-04","","SpecterOps","Partner","https://bloodhoundenterprise.io/","","domains","","","","","","","false","","false" -"BHEAttackPathsData_CL","BloodHound Enterprise","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BloodHound%20Enterprise","azurehoundenterprise","bloodhoundenterprise-azuresentinel","2023-05-04","2021-05-04","","SpecterOps","Partner","https://bloodhoundenterprise.io/","","domains","BloodHoundEnterprise","SpecterOps","Bloodhound Enterprise","The solution is designed to test Bloodhound Enterprise package creation process.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a 'BloodHound Enterprise' to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieve BloodHound Enterprise API Key and ID**\n\nTo enable the Azure Function to authenticate successfully and pull logs into Microsoft Sentinel, you must first obtain the API Key and ID from your BloodHound Enterprise instance. See the documentation to learn more about API on the `https://bloodhound.specterops.io/integrations/bloodhound-api/working-with-api`.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the 'BloodHound Enterprise' connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the 'BloodHound Enterprise' API authorization key(s) or Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""**Option 1 - Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the 'BloodHound Enterprise' connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)]()\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Tenant URL**, **API Key**, **API ID** 'and/or Other required fields'. \n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. 
Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": """", ""description"": ""**Option 2 - Manual Deployment of Azure Functions**\n\n Use the following step-by-step instructions to deploy the 'BloodHound Enterprise' connector manually with Azure Functions.""}, {""title"": ""1. Create a Function App"", ""description"": ""1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, ensure Runtime stack is set to **python 3.11**. \n4. In the **Hosting** tab, ensure **Plan type** is set to **'Consumption (Serverless)'**.\n5.select Storage account\n6. 'Add other required configurations'. \n5. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""2. Import Function App Code(Zip deployment)"", ""description"": ""1. Install Azure CLI\n2. From terminal type **az functionapp deployment source config-zip -g -n --src ** and hit enter. Set the `ResourceGroup` value to: your resource group name. Set the `FunctionApp` value to: your newly created function app name. Set the `Zip File` value to: `digitalshadowsConnector.zip`(path to your zip file). Note:- Download the zip file from the link - [Function App Code](https://github.com/metron-labs/Azure-Sentinel/blob/bloodhound/Solutions/BloodHound/Data%20Connectors/BloodHoundAzureFunction.zip)""}, {""title"": ""3. Configure the Function App"", ""description"": ""1. In the Function App screen, click the Function App name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following 'x (number of)' application settings individually, under Name, with their respective string values (case-sensitive) under Value: \n\t\tDigitalShadowsAccountID\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tDigitalShadowsKey\n\t\tDigitalShadowsSecret\n\t\tHistoricalDays\n\t\tDigitalShadowsURL\n\t\tClassificationFilterOperation\n\t\tHighVariabilityClassifications\n\t\tFUNCTION_NAME\n\t\tlogAnalyticsUri (optional)\n(add any other settings required by the Function App)\nSet the `DigitalShadowsURL` value to: `https://api.searchlight.app/v1`\nSet the `HighVariabilityClassifications` value to: `exposed-credential,marked-document`\nSet the `ClassificationFilterOperation` value to: `exclude` for exclude function app or `include` for include function app \n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Azure Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: https://.ods.opinsights.azure.us. \n4. Once all application settings have been entered, click **Save**.""}, {""instructions"": [{""type"": ""InstructionStepsGroup"", ""parameters"": {""enable"": true, ""instructionSteps"": [{""title"": ""**STEP 3 - Register the Application in Microsoft Entra ID"", ""description"": ""1. **Open the [Microsoft Entra ID page](https://entra.microsoft.com/)**:\n - Click the provided link to open the **Microsoft Entra ID** registration page in a new tab.\n - Ensure you are logged in with an account that has **Admin level** permissions.\n\n2. 
**Create a New Application**:\n - In the **Microsoft Entra ID portal**, select **App registrations** mentioned on the left-hand side tab.\n - Click on **+ New registration**.\n - Fill out the following fields:\n - **Name**: Enter a name for the app (e.g., \u201cBloodHound App\u201d).\n - **Supported account types**: Choose **Accounts in this organizational directory only** (Default Directory only - Single tenant).\n - **Redirect URI**: Leave this blank unless required otherwise.\n - Click **Register** to create the application.\n\n3. **Copy Application and Tenant IDs**:\n - Once the app is registered, note the **Application (client) ID** and **Directory (tenant) ID** from the **Overview** page. You\u2019ll need these for the integration.\n\n4. **Create a Client Secret**:\n - In the **Certificates & secrets** section, click **+ New client secret**.\n - Add a description (e.g., 'BloodHound Secret') and set an expiration (e.g., 1 year).\n - Click **Add**.\n - **Copy the client secret value immediately**, as it will not be shown again.""}, {""title"": ""**STEP 4 - Assign the \""Monitoring Metrics Publisher\"" Role to the App"", ""description"": ""1. **Open the Resource Group in Azure Portal**:\n - Navigate to the **Resource Group** that contains the **Log Analytics Workspace** and **Data Collection Rules (DCRs)** where you want the app to push data.\n\n2. **Assign the Role**:\n - In the **Resource Group** menu, click on **Access control (IAM)** mentioned on the left-hand side tab ..\n - Click on **+ Add** and select **Add role assignment**.\n - In the **Role** dropdown, search for and select the **Monitoring Metrics Publisher** role.\n - Under **Assign access to**, choose **Azure AD user, group, or service principal**.\n - In the **Select** field, search for your registered app by **name** or **client ID**.\n - Click **Save** to assign the role to the application.""}, {""title"": ""**STEP 5 - Deploy the ARM Template"", ""description"": ""1. 
**Retrieve the Workspace ID**:\n - After assigning the role, you will need the **Workspace ID**.\n - Navigate to the **Log Analytics Workspace** within the **Resource Group**.\n - In the **Overview** section, locate the **Workspace ID** field under **Workspace details**.\n - **Copy the Workspace ID** and keep it handy for the next steps.\n\n2. **Click the Deploy to Azure Button**:\n - [![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fmetron-labs%2FAzure-Sentinel%2Fbloodhound%2FSolutions%2FBloodHound%2FData%2520Connectors%2FDeployToAzure.json).\n - This will take you directly to the Azure portal to start the deployment.\n\n3. **Review and Customize Parameters**:\n - On the custom deployment page, ensure you\u2019re deploying to the correct **subscription** and **resource group**.\n - Fill in the parameters like **workspace name**, **workspace ID**, and **workspace location**.\n\n4. **Click Review + Create** and then **Create** to deploy the resources.""}, {""title"": ""**STEP 6 - Verify DCE, DCR, and Log Analytics Table Setup"", ""description"": ""1. **Check the Data Collection Endpoint (DCE)**:\n - After deploying, go to **Azure Portal > Data Collection Endpoints**.\n - Verify that the **BloodHoundDCE** endpoint has been created successfully.\n - **Copy the DCE Logs Ingestion URI**, as you\u2019ll need this for generating the webhook URL.\n\n2. **Confirm Data Collection Rule (DCR) Setup**:\n - Go to **Azure Portal > Data Collection Rules**.\n - Ensure the **BloodHoundDCR** rule is present.\n - **Copy the Immutable ID** of the DCR from the Overview page, as you\u2019ll need it for the webhook URL.\n\n3. 
**Validate Log Analytics Table**:\n - Navigate to your **Log Analytics Workspace** (linked to Microsoft Sentinel).\n - Under the **Tables** section, verify that the **BloodHoundTable_CL** table has been created successfully and is ready to receive data.""}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**BloodHound Enterprise API key & Id** is required. 
See the documentation to learn more about API on the `https://bloodhound.specterops.io/integrations/bloodhound-api/working-with-api`.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BloodHound%20Enterprise/Data%20Connectors/BloodHoundFunction.json","true" -"","Box","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Box","azuresentinel","azure-sentinel-solution-box","2022-05-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"BoxEvents_CL","Box","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Box","azuresentinel","azure-sentinel-solution-box","2022-05-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","BoxDataConnector","Box","Box","The Box data connector provides the capability to ingest [Box enterprise's events](https://developer.box.com/guides/events/#admin-events) into Microsoft Sentinel using the Box REST API. Refer to [Box documentation](https://developer.box.com/guides/events/enterprise-events/for-enterprise/) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Box REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This connector depends on a parser based on Kusto Function to work as expected [**BoxEvents**](https://aka.ms/sentinel-BoxDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Box events collection**\n\nSee documentation to [setup JWT authentication](https://developer.box.com/guides/authentication/jwt/jwt-setup/) and [obtain JSON file with credentials](https://developer.box.com/guides/authentication/jwt/with-sdk/#prerequisites).""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Box data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Box JSON configuration file, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Box data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BoxDataConnector-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **AzureSentinelWorkspaceId**, **AzureSentinelSharedKey**, **BoxConfigJSON**\n4. 
Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Box data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentinel-BoxDataConnector-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAzureSentinelWorkspaceId\n\t\tAzureSentinelSharedKey\n\t\tBOX_CONFIG_JSON\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Box API Credentials"", ""description"": ""Box config JSON file is required for Box REST API JWT authentication. 
[See the documentation to learn more about JWT authentication](https://developer.box.com/guides/authentication/jwt/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Box/Data%20Connectors/Box_API_FunctionApp.json","true" -"BoxEventsV2_CL","Box","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Box","azuresentinel","azure-sentinel-solution-box","2022-05-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","BoxEventsCCPDefinition","Microsoft","Box Events (CCP)","The Box data connector provides the capability to ingest [Box enterprise's events](https://developer.box.com/guides/events/#admin-events) into Microsoft Sentinel using the Box REST API. Refer to [Box documentation](https://developer.box.com/guides/events/enterprise-events/for-enterprise/) for more information.","[{""description"": "">**NOTE:** This connector uses Codeless Connecor Platform (CCP) to connect to the Box REST API to pull logs into Microsoft Sentinel.""}, {""description"": "">**NOTE:** This connector depends on a parser based on Kusto Function to work as expected [**BoxEvents**](https://aka.ms/sentinel-BoxDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""description"": ""**STEP 1 - Create Box Custom Application**\n\nSee documentation to [setup client credentials authentication](https://developer.box.com/guides/authentication/client-credentials/client-credentials-setup/)\n""}, {""description"": ""**STEP 2 - Grab Client ID and Client Secret values**\n\nYou might need to setup 2FA to fetch the secret.\n""}, {""description"": ""**STEP 3 - Grab Box Enterprise ID from Box Admin Console**\n\nSee documentation to [find Enterprise ID](https://developer.box.com/platform/appendix/locating-values/)\n""}, {""description"": ""Provide the required values below:\n"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Box Enterprise ID"", ""placeholder"": ""123456"", ""type"": 
""text"", ""name"": ""boxEnterpriseId""}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}], ""title"": ""Connect to Box to start collecting event logs to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Box API credentials"", ""description"": ""Box API requires a Box App client ID and client secret to authenticate. [See the documentation to learn more about Client Credentials grant](https://developer.box.com/guides/authentication/client-credentials/client-credentials-setup/)""}, {""name"": ""Box Enterprise ID"", ""description"": ""Box Enterprise ID is required to make the connection. See documentation to [find Enterprise ID](https://developer.box.com/platform/appendix/locating-values/)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Box/Data%20Connectors/BoxEvents_ccp/BoxEvents_DataConnectorDefinition.json","true" -"BoxEvents_CL","Box","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Box","azuresentinel","azure-sentinel-solution-box","2022-05-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","BoxEventsCCPDefinition","Microsoft","Box Events (CCP)","The Box data connector provides the capability to ingest [Box enterprise's events](https://developer.box.com/guides/events/#admin-events) into Microsoft Sentinel using the Box REST API. 
Refer to [Box documentation](https://developer.box.com/guides/events/enterprise-events/for-enterprise/) for more information.","[{""description"": "">**NOTE:** This connector uses Codeless Connecor Platform (CCP) to connect to the Box REST API to pull logs into Microsoft Sentinel.""}, {""description"": "">**NOTE:** This connector depends on a parser based on Kusto Function to work as expected [**BoxEvents**](https://aka.ms/sentinel-BoxDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""description"": ""**STEP 1 - Create Box Custom Application**\n\nSee documentation to [setup client credentials authentication](https://developer.box.com/guides/authentication/client-credentials/client-credentials-setup/)\n""}, {""description"": ""**STEP 2 - Grab Client ID and Client Secret values**\n\nYou might need to setup 2FA to fetch the secret.\n""}, {""description"": ""**STEP 3 - Grab Box Enterprise ID from Box Admin Console**\n\nSee documentation to [find Enterprise ID](https://developer.box.com/platform/appendix/locating-values/)\n""}, {""description"": ""Provide the required values below:\n"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Box Enterprise ID"", ""placeholder"": ""123456"", ""type"": ""text"", ""name"": ""boxEnterpriseId""}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}], ""title"": ""Connect to Box to start collecting event logs to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Box API credentials"", ""description"": ""Box API requires a Box App 
client ID and client secret to authenticate. [See the documentation to learn more about Client Credentials grant](https://developer.box.com/guides/authentication/client-credentials/client-credentials-setup/)""}, {""name"": ""Box Enterprise ID"", ""description"": ""Box Enterprise ID is required to make the connection. See documentation to [find Enterprise ID](https://developer.box.com/platform/appendix/locating-values/)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Box/Data%20Connectors/BoxEvents_ccp/BoxEvents_DataConnectorDefinition.json","true" -"","Broadcom SymantecDLP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Broadcom%20SymantecDLP","azuresentinel","azure-sentinel-solution-broadcomsymantecdlp","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"CommonSecurityLog","Broadcom SymantecDLP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Broadcom%20SymantecDLP","azuresentinel","azure-sentinel-solution-broadcomsymantecdlp","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","BroadcomSymantecDLP","Broadcom","[Deprecated] Broadcom Symantec DLP via Legacy Agent","The [Broadcom Symantec Data Loss Prevention (DLP)](https://www.broadcom.com/products/cyber-security/information-protection/data-loss-prevention) connector allows you to easily connect your Symantec DLP with Microsoft Sentinel, to create custom dashboards, alerts, and improve investigation. This gives you more insight into your organization’s information, where it travels, and improves your security operation capabilities.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias SymantecDLP and load the function code or click [here](https://aka.ms/sentinel-symantecdlp-parser). The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python \u2013version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. 
Forward Symantec DLP logs to a Syslog agent"", ""description"": ""Configure Symantec DLP to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent.\n1. [Follow these instructions](https://knowledge.broadcom.com/external/article/159509/generating-syslog-messages-from-data-los.html) to configure the Symantec DLP to forward syslog\n2. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python \u2013version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Broadcom%20SymantecDLP/Data%20Connectors/Connector_Syslog_SymantecDLP.json","true" -"CommonSecurityLog","Broadcom SymantecDLP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Broadcom%20SymantecDLP","azuresentinel","azure-sentinel-solution-broadcomsymantecdlp","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","BroadcomSymantecDLPAma","Broadcom","[Deprecated] Broadcom Symantec DLP via AMA","The [Broadcom Symantec Data Loss Prevention (DLP)](https://www.broadcom.com/products/cyber-security/information-protection/data-loss-prevention) connector allows you to easily connect your Symantec DLP with Microsoft Sentinel, to create custom dashboards, alerts, and improve investigation. 
This gives you more insight into your organization’s information, where it travels, and improves your security operation capabilities.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias SymantecDLP and load the function code or click [here](https://aka.ms/sentinel-symantecdlp-parser). The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward Symantec DLP logs to a Syslog agent"", ""description"": ""Configure Symantec DLP to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent.\n1. 
[Follow these instructions](https://knowledge.broadcom.com/external/article/159509/generating-syslog-messages-from-data-los.html) to configure the Symantec DLP to forward syslog\n2. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address."", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Broadcom%20SymantecDLP/Data%20Connectors/template_SymantecDLPAMA.json","true" -"","Business Email Compromise - Financial Fraud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Business%20Email%20Compromise%20-%20Financial%20Fraud","azuresentinel","azure-sentinel-solution-bec_financialfraud","2023-08-04","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"","CTERA","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CTERA","cteranetworksltd1651947437632","ctera-microsoft-sentinel","2024-07-28","","","CTERA","Partner","https://www.ctera.com/","","domains","","","","","","","false","","false" -"Syslog","CTERA","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CTERA","cteranetworksltd1651947437632","ctera-microsoft-sentinel","2024-07-28","","","CTERA","Partner","https://www.ctera.com/","","domains","CTERA","CTERA Networks Ltd","CTERA Syslog","The CTERA Data Connector for Microsoft Sentinel offers monitoring and threat detection capabilities for your CTERA solution.
It includes a workbook visualizing the sum of all operations per type, deletions, and denied access operations.
It also provides analytic rules which detects ransomware incidents and alert you when a user is blocked due to suspicious ransomware activity.
Additionally, it helps you identify critical patterns such as mass access denied events, mass deletions, and mass permission changes, enabling proactive threat management and response.","[{""title"": ""Step 1: Connect CTERA Platform to Syslog"", ""description"": ""Set up your CTERA portal syslog connection and Edge-Filer Syslog connector"", ""instructions"": [{""parameters"": {""title"": ""CTERA Syslog Configuration"", ""instructionSteps"": [{""title"": ""Portal Syslog connection"", ""description"": ""Connect CTERA Portal to syslog server, see instructions https://kb.ctera.com/v1/docs/en/managing-log-settings?highlight=logg""}, {""title"": ""Edge Filer Audit logs"", ""description"": ""Enable Audit logs on the desired Edge-filers""}, {""title"": ""Edge-Filer Syslog Service"", ""description"": ""Enable Edge-Filer Syslog service, see instructions https://kb.ctera.com/v1/docs/en/setting-up-the-edge-filer-syslog-service-2?highlight=Edge%20Filer%20Syslog""}]}}]}, {""title"": ""Step 2: Install Azure Monitor Agent (AMA) on Syslog Server"", ""description"": ""Install the Azure Monitor Agent (AMA) on your syslog server to enable data collection."", ""instructions"": [{""parameters"": {""title"": ""Install Azure Monitor Agent"", ""instructionSteps"": [{""title"": ""Log in to Azure Portal"", ""description"": ""Use your Azure credentials to log in to the Azure Portal.""}, {""title"": ""Navigate to Azure Arc"", ""description"": ""In the Azure Portal, go to 'Azure Arc' and select your connected syslog server.""}, {""title"": ""Select Extensions"", ""description"": ""In the Azure Arc settings for your syslog server, navigate to the 'Extensions' section.""}, {""title"": ""Add Extension"", ""description"": ""Click on 'Add' and select 'Azure Monitor Agent' from the list of available extensions.""}, {""title"": ""Install AMA"", ""description"": ""Follow the prompts to install the Azure Monitor Agent on your syslog server. 
For detailed instructions, refer to the official documentation: [Install Azure Monitor Agent](https://learn.microsoft.com/en-us/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal)""}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CTERA/Data%20Connectors/CTERA_Data_Connector.json","true" -"","CTM360","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CTM360","ctm360wll1698919697848","ctm360_microsoft_sentinel_solution","2023-10-23","","","Cyber Threat Management 360","Partner","https://www.ctm360.com/","","domains","","","","","","","false","","false" -"CBSLog_Azure_1_CL","CTM360","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CTM360","ctm360wll1698919697848","ctm360_microsoft_sentinel_solution","2023-10-23","","","Cyber Threat Management 360","Partner","https://www.ctm360.com/","","domains","CBSPollingIDAzureFunctions","CTM360","Cyber Blind Spot Integration","Through the API integration, you have the capability to retrieve all the issues related to your CBS organizations via a RESTful interface.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a 'CyberBlindSpot' to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the 'CyberBlindSpot' API**\n\nThe provider should provide or link to detailed steps to configure the 'CyberBlindSpot' API endpoint so that the Azure Function can authenticate to it successfully, get its authorization key or token, and pull the appliance's logs into Microsoft Sentinel.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the 'CyberBlindSpot' connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the 'CyberBlindSpot' API authorization key(s) readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""**Option 1 - Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the 'CyberBlindSpot' connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CTM360-CBS-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CTM360-CBS-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API **, 'and/or Other required fields'. \n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the CTM360 CBS data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. Download the [Azure Function App](https://raw.githubusercontent.com/CTM360-Integrations/Azure-Sentinel/ctm360-HV-CBS-azurefunctionapp/Solutions/CTM360/Data%20Connectors/CBS/AzureFunctionCTM360_CBS.zip) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CTIXYZ).\n\n\te. 
**Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tCTM360AccountID\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tCTM360Key\n\t\tFUNCTION_NAME\n\t\tlogAnalyticsUri - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CTM360/Data%20Connectors/CBS/CTM360_CBS_API_functionApp.json","true" -"HackerViewLog_Azure_1_CL","CTM360","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CTM360","ctm360wll1698919697848","ctm360_microsoft_sentinel_solution","2023-10-23","","","Cyber Threat Management 360","Partner","https://www.ctm360.com/","","domains","HVPollingIDAzureFunctions","CTM360","HackerView Intergration","Through the API integration, you have the capability to retrieve all the issues related to your HackerView organizations via a RESTful interface.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a '' to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the 'HackerView' API**\n\nThe provider should provide or link to detailed steps to configure the 'HackerView' API endpoint so that the Azure Function can authenticate to it successfully, get its authorization key or token, and pull the appliance's logs into Microsoft Sentinel.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the 'HackerView' connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the 'HackerView' API authorization key(s) readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""**Option 1 - Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the 'HackerView' connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CTM360-HV-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CTM360-HV-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API **, 'and/or Other required fields'. \n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": """", ""description"": ""**Option 2 - Manual Deployment of Azure Functions**\n\n Use the following step-by-step instructions to deploy the 'HackerView' connector manually with Azure Functions.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the CTM360 CBS data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. Download the [Azure Function App](https://raw.githubusercontent.com/CTM360-Integrations/Azure-Sentinel/ctm360-HV-CBS-azurefunctionapp/Solutions/CTM360/Data%20Connectors/HackerView/AzureFunctionCTM360_HV.zip) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. 
Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CTIXYZ).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tCTM360AccountID\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tCTM360Key\n\t\tFUNCTION_NAME\n\t\tlogAnalyticsUri - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CTM360/Data%20Connectors/HackerView/CTM360_HV_API_FunctionApp.json","true" -"","Check Point","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Check%20Point","checkpoint","checkpoint-sentinel-solutions","2021-08-13","","","Check Point","Partner","https://www.checkpoint.com/support-services/contact-support/","","domains","","","","","","","false","","false" -"","Check Point CloudGuard CNAPP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Check%20Point%20CloudGuard%20CNAPP","checkpoint","checkpoint-sentinel-solutions-cloud-guard","2024-11-12","","","Check Point","Partner","https://www.checkpoint.com/support-services/contact-support/","","domains","","","","","","","false","","false" -"CloudGuard_SecurityEvents_CL","Check Point CloudGuard CNAPP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Check%20Point%20CloudGuard%20CNAPP","checkpoint","checkpoint-sentinel-solutions-cloud-guard","2024-11-12","","","Check Point","Partner","https://www.checkpoint.com/support-services/contact-support/","","domains","CloudGuardCCPDefinition","CheckPoint","Check Point CloudGuard CNAPP Connector for Microsoft Sentinel","The [CloudGuard](https://sc1.checkpoint.com/documents/CloudGuard_Dome9/Documentation/Overview/CloudGuard-CSPM-Introduction.htm?cshid=help_center_documentation) data connector enables the ingestion of security 
events from the CloudGuard API into Microsoft Sentinel™, using Microsoft Sentinel’s Codeless Connector Platform. The connector supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) which parses incoming security event data into custom columns. This pre-parsing process eliminates the need for query-time parsing, resulting in improved performance for data queries.","[{""description"": ""To enable the CloudGuard connector for Microsoft Sentinel, enter the required information below and select Connect.\n>"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""API Key ID"", ""placeholder"": ""api_key"", ""type"": ""text"", ""name"": ""api_key""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key Secret"", ""placeholder"": ""api_secret"", ""type"": ""password"", ""name"": ""api_secret""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CloudGuard Endpoint URL"", ""placeholder"": ""e.g. 
https://api.dome9.com"", ""type"": ""text"", ""name"": ""endpoint_url""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Filter"", ""placeholder"": ""Paste filter from CloudGuard"", ""type"": ""text"", ""name"": ""query_filter""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Connect CloudGuard Security Events to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""CloudGuard API Key"", ""description"": ""Refer to the instructions provided [here](https://sc1.checkpoint.com/documents/CloudGuard_Dome9/Documentation/Settings/Users-Roles.htm#add_service) to generate an API key.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Check%20Point%20CloudGuard%20CNAPP/Data%20Connectors/CloudGuard_ccp/CloudGuard_DataConnectorDefinition.json","true" -"","Check Point Cyberint Alerts","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Check%20Point%20Cyberint%20Alerts","checkpoint","checkpoint-cyberint-solutions-alerts","2025-03-18","","","Cyberint","Partner","https://cyberint.com/customer-support/","","domains","","","","","","","false","","false" -"argsentdc_CL","Check Point Cyberint Alerts","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Check%20Point%20Cyberint%20Alerts","checkpoint","checkpoint-cyberint-solutions-alerts","2025-03-18","","","Cyberint","Partner","https://cyberint.com/customer-support/","","domains","CheckPointCyberintAlerts","Checkpoint Cyberint","Check Point Cyberint Alerts Connector (via Codeless Connector Platform)","Cyberint, a Check Point company, provides a Microsoft Sentinel integration to streamline critical Alerts and 
bring enriched threat intelligence from the Infinity External Risk Management solution into Microsoft Sentinel. This simplifies the process of tracking the status of tickets with automatic sync updates across systems. Using this new integration for Microsoft Sentinel, existing Cyberint and Microsoft Sentinel customers can easily pull logs based on Cyberint's findings into Microsoft Sentinel platform.","[{""title"": ""Connect Checkpoint Cyberint Alerts to Microsoft Sentinel"", ""description"": ""To enable the connector provide the required information below and click on Connect.\n>"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Argos URL"", ""placeholder"": ""Argos URL"", ""type"": ""text"", ""name"": ""argosurl""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""apikey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Customer Name"", ""placeholder"": ""Customer Name"", ""type"": ""text"", ""name"": ""customername""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""Connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Check Point Cyberint API Key, Argos URL, and Customer Name"", ""description"": ""The connector API key, Argos URL, and Customer Name are required""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Check%20Point%20Cyberint%20Alerts/Data%20Connectors/CyberintArgosAlertsLogs_ccp/CyberintArgosAlertsLogs_connectorDefinition.json","true" -"","Check Point Cyberint 
IOC","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Check%20Point%20Cyberint%20IOC","checkpoint","azure-sentinel-checkpoint-cyberint-ioc","2025-04-29","","","Cyberint","Partner","https://cyberint.com/customer-support/","","domains","","","","","","","false","","false" -"iocsent_CL","Check Point Cyberint IOC","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Check%20Point%20Cyberint%20IOC","checkpoint","azure-sentinel-checkpoint-cyberint-ioc","2025-04-29","","","Cyberint","Partner","https://cyberint.com/customer-support/","","domains","CheckPointCyberintIOC","Checkpoint Cyberint","Check Point Cyberint IOC Connector","This is data connector for Check Point Cyberint IOC.","[{""title"": ""Connect Checkpoint Cyberint Alerts to Microsoft Sentinel"", ""description"": ""To enable the connector provide the required information below and click on Connect.\n>"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Argos URL"", ""placeholder"": ""Argos URL"", ""type"": ""text"", ""name"": ""argosurl""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API key"", ""placeholder"": ""API key"", ""type"": ""text"", ""name"": ""apikey""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""Connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Check Point Cyberint API Key and Argos URL"", ""description"": ""The connector API key and Argos URL are required""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Check%20Point%20Cyberint%20IOC/Data%20Connectors/CyberintArgosIOCLogs_ccp/CyberintArgosIOCLogs_connectorDefinition.json","true" -"","CheckPhish by 
Bolster","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CheckPhish%20by%20Bolster","azuresentinel","azure-sentinel-solution-checkphishbybolster","2022-10-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"","Cisco ACI","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20ACI","azuresentinel","azure-sentinel-solution-ciscoaci","2021-07-03","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"Syslog","Cisco ACI","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20ACI","azuresentinel","azure-sentinel-solution-ciscoaci","2021-07-03","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoACI","Cisco","[Deprecated] Cisco Application Centric Infrastructure","[Cisco Application Centric Infrastructure (ACI)](https://www.cisco.com/c/en/us/solutions/collateral/data-center-virtualization/application-centric-infrastructure/solution-overview-c22-741487.html) data connector provides the capability to ingest [Cisco ACI logs](https://www.cisco.com/c/en/us/td/docs/switches/datacenter/aci/apic/sw/all/syslog/guide/b_ACI_System_Messages_Guide/m-aci-system-messages-reference.html) into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**CiscoACIEvent**](https://aka.ms/sentinel-CiscoACI-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using Cisco ACI Release 1.x"", ""instructions"": []}, {""title"": ""1. 
Configure Cisco ACI system sending logs via Syslog to remote server where you will install the agent."", ""description"": ""[Follow these steps](https://www.cisco.com/c/en/us/td/docs/switches/datacenter/aci/apic/sw/1-x/basic-config/b_ACI_Config_Guide/b_ACI_Config_Guide_chapter_010.html#d2933e4611a1635) to configure Syslog Destination, Destination Group, and Syslog Source.""}, {""title"": ""2. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server to which the logs will be forwarded.\n\n> Logs on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, 
""type"": ""InstructionStepsGroup""}]}, {""title"": ""3. Check logs in Microsoft Sentinel"", ""description"": ""Open Log Analytics to check if the logs are received using the Syslog schema.\n\n>**NOTE:** It may take up to 15 minutes before new logs will appear in Syslog table."", ""instructions"": []}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20ACI/Data%20Connectors/CiscoACI_Syslog.json","true" -"","Cisco ETD","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20ETD","cisco","cisco-etd-sentinel","2024-03-04","","","Cisco Systems","Partner","","","domains","","","","","","","false","","false" -"CiscoETD_CL","Cisco ETD","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20ETD","cisco","cisco-etd-sentinel","2024-03-04","","","Cisco Systems","Partner","","","domains","CiscoETD","Cisco","Cisco ETD","The connector fetches data from ETD api for threat analysis","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ETD API to pull its logs into Microsoft Sentinel.""}, {""title"": """", ""description"": ""**Follow the deployment steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the ETD 
data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following).\n"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco ETD data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CiscoETD-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the **WorkspaceID**, **SharedKey**, **ClientID**, **ClientSecret**, **ApiKey**, **Verdicts**, **ETD Region**\n4. Click **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Email Threat Defense API, API key, Client ID and Secret"", ""description"": ""Ensure you have the API key, Client ID and Secret key.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20ETD/Data%20Connectors/CiscoETD_API_FunctionApp.json","true" -"","Cisco Firepower EStreamer","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Firepower%20EStreamer","cisco","cisco-firepower-estreamer","2022-05-25","","","Cisco","Partner","https://www.cisco.com/c/en_in/support/index.html","","domains","","","","","","","false","","false" -"CommonSecurityLog","Cisco Firepower EStreamer","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Firepower%20EStreamer","cisco","cisco-firepower-estreamer","2022-05-25","","","Cisco","Partner","https://www.cisco.com/c/en_in/support/index.html","","domains","CiscoFirepowerEStreamer","Cisco","[Deprecated] Cisco Firepower eStreamer via Legacy Agent","eStreamer is a Client Server API designed for the Cisco Firepower NGFW Solution. The eStreamer client requests detailed event data on behalf of the SIEM or logging solution in the Common Event Format (CEF).","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 25226 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. 
Install the Firepower eNcore client"", ""description"": ""Install and configure the Firepower eNcore eStreamer client, for more details see full install [guide](https://www.cisco.com/c/en/us/td/docs/security/firepower/670/api/eStreamer_enCore/eStreamereNcoreSentinelOperationsGuide_409.html)"", ""innerSteps"": [{""title"": ""2.1 Download the Firepower Connector from github"", ""description"": ""Download the latest version of the Firepower eNcore connector for Microsoft Sentinel [here](https://github.com/CiscoSecurity/fp-05-microsoft-sentinel-connector). If you plan on using python3 use the [python3 eStreamer connector](https://github.com/CiscoSecurity/fp-05-microsoft-sentinel-connector/tree/python3)""}, {""title"": ""2.2 Create a pkcs12 file using the Azure/VM Ip Address"", ""description"": ""Create a pkcs12 certificate using the public IP of the VM instance in Firepower under System->Integration->eStreamer, for more information please see install [guide](https://www.cisco.com/c/en/us/td/docs/security/firepower/670/api/eStreamer_enCore/eStreamereNcoreSentinelOperationsGuide_409.html#_Toc527049443)""}, {""title"": ""2.3 Test Connectivity between the Azure/VM Client and the FMC"", ""description"": ""Copy the pkcs12 file from the FMC to the Azure/VM instance and run the test utility (./encore.sh test) to ensure a connection can be established, for more details please see the setup [guide](https://www.cisco.com/c/en/us/td/docs/security/firepower/670/api/eStreamer_enCore/eStreamereNcoreSentinelOperationsGuide_409.html#_Toc527049430)""}, {""title"": ""2.4 Configure encore to stream data to the agent"", ""description"": ""Configure encore to stream data via TCP to the Microsoft Agent, this should be enabled by default, however, additional ports and streaming protocols can configured depending on your network security posture, it is also possible to save the data to the file system, for more information please see [Configure 
Encore](https://www.cisco.com/c/en/us/td/docs/security/firepower/670/api/eStreamer_enCore/eStreamereNcoreSentinelOperationsGuide_409.html#_Toc527049433)""}]}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Firepower%20EStreamer/Data%20Connectors/CiscoFirepowerEStreamerCollector.json","true" -"CommonSecurityLog","Cisco Firepower EStreamer","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Firepower%20EStreamer","cisco","cisco-firepower-estreamer","2022-05-25","","","Cisco","Partner","https://www.cisco.com/c/en_in/support/index.html","","domains","CiscoFirepowerEStreamerAma","Cisco","[Deprecated] Cisco Firepower eStreamer via AMA","eStreamer is a Client Server API designed for the Cisco Firepower NGFW Solution. The eStreamer client requests detailed event data on behalf of the SIEM or logging solution in the Common Event Format (CEF).","[{""title"": """", ""description"": """", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. 
Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Install the Firepower eNcore client"", ""description"": ""Install and configure the Firepower eNcore eStreamer client, for more details see full install [guide](https://www.cisco.com/c/en/us/td/docs/security/firepower/670/api/eStreamer_enCore/eStreamereNcoreSentinelOperationsGuide_409.html)"", ""innerSteps"": [{""title"": ""1. Download the Firepower Connector from github"", ""description"": ""Download the latest version of the Firepower eNcore connector for Microsoft Sentinel [here](https://github.com/CiscoSecurity/fp-05-microsoft-sentinel-connector). If you plan on using python3 use the [python3 eStreamer connector](https://github.com/CiscoSecurity/fp-05-microsoft-sentinel-connector/tree/python3)""}, {""title"": ""2. Create a pkcs12 file using the Azure/VM Ip Address"", ""description"": ""Create a pkcs12 certificate using the public IP of the VM instance in Firepower under System->Integration->eStreamer, for more information please see install [guide](https://www.cisco.com/c/en/us/td/docs/security/firepower/670/api/eStreamer_enCore/eStreamereNcoreSentinelOperationsGuide_409.html#_Toc527049443)""}, {""title"": ""3. Test Connectivity between the Azure/VM Client and the FMC"", ""description"": ""Copy the pkcs12 file from the FMC to the Azure/VM instance and run the test utility (./encore.sh test) to ensure a connection can be established, for more details please see the setup [guide](https://www.cisco.com/c/en/us/td/docs/security/firepower/670/api/eStreamer_enCore/eStreamereNcoreSentinelOperationsGuide_409.html#_Toc527049430)""}, {""title"": ""4. 
Configure encore to stream data to the agent"", ""description"": ""Configure encore to stream data via TCP to the Microsoft Agent, this should be enabled by default, however, additional ports and streaming protocols can configured depending on your network security posture, it is also possible to save the data to the file system, for more information please see [Configure Encore](https://www.cisco.com/c/en/us/td/docs/security/firepower/670/api/eStreamer_enCore/eStreamereNcoreSentinelOperationsGuide_409.html#_Toc527049433)""}]}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Firepower%20EStreamer/Data%20Connectors/template_CiscoFirepowerEStreamerAMA.json","true" -"","Cisco ISE","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20ISE","azuresentinel","azure-sentinel-solution-ciscoise","2021-07-03","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"Syslog","Cisco ISE","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20ISE","azuresentinel","azure-sentinel-solution-ciscoise","2021-07-03","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoISE","Cisco","[Deprecated] Cisco Identity Services Engine","The Cisco Identity Services Engine (ISE) data connector provides the capability to ingest [Cisco ISE](https://www.cisco.com/c/en/us/products/security/identity-services-engine/index.html) events into Microsoft Sentinel. It helps you gain visibility into what is happening in your network, such as who is connected, which applications are installed and running, and much more. Refer to [Cisco ISE logging mechanism documentation](https://www.cisco.com/c/en/us/td/docs/security/ise/2-7/admin_guide/b_ise_27_admin_guide/b_ISE_admin_27_maintain_monitor.html#reference_BAFBA5FA046A45938810A5DF04C00591) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. 
[Follow these steps](https://aka.ms/sentinel-ciscoise-parser) to create the Kusto Functions alias, **CiscoISEEvent**"", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n2. Select **Apply below configuration to my machines** and select the facilities and severities.\n3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. 
Configure Cisco ISE Remote Syslog Collection Locations"", ""description"": ""[Follow these instructions](https://www.cisco.com/c/en/us/td/docs/security/ise/2-7/admin_guide/b_ise_27_admin_guide/b_ISE_admin_27_maintain_monitor.html#ID58) to configure remote syslog collection locations in your Cisco ISE deployment.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20ISE/Data%20Connectors/Connector_Cisco_ISE.json","true" -"","Cisco Meraki Events via REST API","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Meraki%20Events%20via%20REST%20API","azuresentinel","azure-sentinel-solution-ciscomerakinativepoller","2023-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","Cisco SD-WAN","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20SD-WAN","cisco","cisco-catalyst-sdwan-sentinel","2023-06-01","2024-06-01","","Cisco Systems","Partner","https://globalcontacts.cloudapps.cisco.com/contacts/contactDetails/en_US/c1o1-c2o2-c3o8","","domains","","","","","","","false","","false" -"CiscoSDWANNetflow_CL","Cisco SD-WAN","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20SD-WAN","cisco","cisco-catalyst-sdwan-sentinel","2023-06-01","2024-06-01","","Cisco Systems","Partner","https://globalcontacts.cloudapps.cisco.com/contacts/contactDetails/en_US/c1o1-c2o2-c3o8","","domains","CiscoSDWAN","Cisco","Cisco Software Defined WAN","The Cisco Software Defined WAN(SD-WAN) data connector provides the capability to ingest [Cisco SD-WAN](https://www.cisco.com/c/en_in/solutions/enterprise-networks/sd-wan/index.html) Syslog and Netflow data into 
Microsoft Sentinel.","[{""description"": ""**To ingest Cisco SD-WAN Syslog and Netflow data into Microsoft Sentinel follow the steps below.**""}, {""title"": ""1. Steps to ingest Syslog data to Microsoft sentinel"", ""description"": ""Azure Monitor Agent will be used to collect the syslog data into Microsoft sentinel. For that first need to create an azure arc server for the VM from which syslog data will be sent.\n""}, {""title"": ""1.1 Steps to Add Azure Arc Server"", ""description"": ""1. In Azure portal, go to Servers - Azure Arc and click on Add.\n2. Select Generate Script under Add a single server section. A User can also generate scripts for Multiple Servers as well.\n3. Review the information on the Prerequisites page, then select Next.\n4. On the Resource details page, provide the subscription and resource group of the Microsoft Sentinel, Region, Operating system and Connectivity method. Then select Next.\n5. On the Tags page, review the default Physical location tags suggested and enter a value, or specify one or more Custom tags to support your standards. Then select Next\n6. Select Download to save the script file. \n7. Now that you have generated the script, the next step is to run it on the server that you want to onboard to Azure Arc. \n8. If you have Azure VM follow the steps mentioned in the [link](https://learn.microsoft.com/azure/azure-arc/servers/plan-evaluate-on-azure-virtual-machine) before running the script. \n9. Run the script by the following command: `./.sh`\n10. After you install the agent and configure it to connect to Azure Arc-enabled servers, go to the Azure portal to verify that the server has successfully connected. View your machine in the Azure portal.\n> **Reference link:** [https://learn.microsoft.com/azure/azure-arc/servers/learn/quick-enable-hybrid-vm](https://learn.microsoft.com/azure/azure-arc/servers/learn/quick-enable-hybrid-vm)""}, {""title"": ""1.2 Steps to Create Data Collection Rule (DCR)"", ""description"": ""1. 
In Azure Portal search for Monitor. Under Settings, select Data Collection Rules and Select Create.\n2. On the Basics panel, enter the Rule Name, Subscription, Resource group, Region and Platform Type.\n3. Select Next: Resources.\n4. Select Add resources.Use the filters to find the virtual machine that you'll use to collect logs.\n5. Select the virtual machine. Select Apply.\n6. Select Next: Collect and deliver.\n7. Select Add data source. For Data source type, select Linux syslog. \n8. For Minimum log level, leave the default values LOG_DEBUG.\n9. Select Next: Destination.\n10. Select Add destination and add Destination type, Subscription and Account or namespace.\n11. Select Add data source. Select Next: Review + create.\n12. Select Create. Wait for 20 minutes. In Microsoft Sentinel or Azure Monitor, verify that the Azure Monitor agent is running on your VM.\n> **Reference link:** [https://learn.microsoft.com/azure/sentinel/forward-syslog-monitor-agent](https://learn.microsoft.com/azure/sentinel/forward-syslog-monitor-agent)""}, {""title"": ""2. Steps to ingest Netflow data to Microsoft sentinel"", ""description"": ""To Ingest Netflow data into Microsoft sentinel, Filebeat and Logstash needs to be installed and configured on the VM. After the configuration, vm will be able to receive netflow data on the configured port and that data will be ingested into the workspace of Microsoft sentinel.\n""}, {""title"": ""2.1 Install filebeat and logstash"", ""description"": ""1. For the installation of filebeat and logstash using apt refer to this doc: \n 1. Filebeat: [https://www.elastic.co/guide/en/beats/filebeat/current/setup-repositories.html](https://www.elastic.co/guide/en/beats/filebeat/current/setup-repositories.html). \n 2. Logstash: [https://www.elastic.co/guide/en/logstash/current/installing-logstash.html](https://www.elastic.co/guide/en/logstash/current/installing-logstash.html). \n2. 
For the installation of filebeat and logstash for RedHat based Linux (yum) steps are as follows: \n 1. Filebeat: [https://www.elastic.co/guide/en/beats/filebeat/current/setup-repositories.html#_yum](https://www.elastic.co/guide/en/beats/filebeat/current/setup-repositories.html#_yum). \n 2. Logstash: [https://www.elastic.co/guide/en/logstash/current/installing-logstash.html#_yum](https://www.elastic.co/guide/en/logstash/current/installing-logstash.html#_yum)""}, {""title"": ""2.2 Configure Filebeat to send events to Logstash"", ""description"": ""1. Edit filebeat.yml file: `vi /etc/filebeat/filebeat.yml` \n2. Comment out the Elasticsearch Output section. \n3. Uncomment Logstash Output section (Uncomment out only these two lines)-\n\t\toutput.logstash\n\t\thosts: [\""localhost:5044\""] \n4. In the Logstash Output section, if you want to send the data other than the default port i.e. 5044 port, then replace the port number in the hosts field. (Note: This port should be added in the conf file, while configuring logstash.) \n5. In the 'filebeat.inputs' section comment out existing configuration and add the following configuration: \n\t\t- type: netflow\n\t\t max_message_size: 10KiB\n\t\t host: \""0.0.0.0:2055\""\n\t\t protocols: [ v5, v9, ipfix ]\n\t\t expiration_timeout: 30m\n\t\t queue_size: 8192\n\t\t custom_definitions:\n\t\t - /etc/filebeat/custom.yml\n\t\t detect_sequence_reset: true\n\t\t enabled: true \n6. In the Filebeat inputs section, if you want to receive the data other than the default port i.e. 2055 port, then replace the port number in the host field. \n7. Add the provided [custom.yml](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/Cisco%20SD-WAN/Data%20Connectors/custom.yml) file inside the /etc/filebeat/ directory. \n8. Open the filebeat input and output port in the firewall. \n 1. Run command: `firewall-cmd --zone=public --permanent --add-port=2055/udp` \n 2. 
Run command: `firewall-cmd --zone=public --permanent --add-port=5044/udp` \n> Note: if a custom port is added for filebeat input/output, then open that port in the firewall.""}, {""title"": ""2.3 Configure Logstash to send events to Microsoft Sentinel"", ""description"": ""1. Install the Azure Log Analytics plugin: \n 1. Run Command: `sudo /usr/share/logstash/bin/logstash-plugin install microsoft-logstash-output-azure-loganalytics` \n2. Store the Log Analytics workspace key in the Logstash key store. The workspace key can be found in Azure Portal under Log analytic workspace > Select workspace > Under Settings select Agent > Log Analytics agent instructions. \n3. Copy the Primary key and run the following commands: \n 1. `sudo /usr/share/logstash/bin/logstash-keystore --path.settings /etc/logstash create LogAnalyticsKey` \n 2. `sudo /usr/share/logstash/bin/logstash-keystore --path.settings /etc/logstash add LogAnalyticsKey` \n4. Create the configuration file /etc/logstash/cisco-netflow-to-sentinel.conf: \n\t\tinput {\n\t\t beats {\n\t\t port => #(Enter output port number which has been configured during filebeat configuration i.e. filebeat.yml file .)\n\t\t }\n\t\t}\n\t\toutput {\n\t\t microsoft-logstash-output-azure-loganalytics {\n\t\t workspace_id => \""\""\n\t\t workspace_key => \""${LogAnalyticsKey}\""\n\t\t custom_log_table_name => \""CiscoSDWANNetflow\""\n\t\t }\n\t\t} \n> Note: If table is not present in Microsoft sentinel, then it will create a new table in sentinel.""}, {""title"": ""2.4 Run Filebeat:"", ""description"": ""1. Open a terminal and run the command: \n> `systemctl start filebeat` \n2. This command will start running filebeat in the background. To see the logs stop the filebeat (`systemctl stop filebeat`) then run the following command: \n> `filebeat run -e`""}, {""title"": ""2.5 Run Logstash:"", ""description"": ""1. 
In another terminal run the command: \n> `/usr/share/logstash/bin/logstash --path.settings /etc/logstash -f /etc/logstash/cisco-netflow-to-sentinel.conf &` \n2. This command will start running the logstash in the background. To see the logs of logstash kill the above process and run the following command : \n> `/usr/share/logstash/bin/logstash --path.settings /etc/logstash -f /etc/logstash/cisco-netflow-to-sentinel.conf`""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20SD-WAN/Data%20Connectors/CiscoSDWAN.json","true" -"Syslog","Cisco SD-WAN","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20SD-WAN","cisco","cisco-catalyst-sdwan-sentinel","2023-06-01","2024-06-01","","Cisco Systems","Partner","https://globalcontacts.cloudapps.cisco.com/contacts/contactDetails/en_US/c1o1-c2o2-c3o8","","domains","CiscoSDWAN","Cisco","Cisco Software Defined WAN","The Cisco Software Defined WAN(SD-WAN) data connector provides the capability to ingest [Cisco SD-WAN](https://www.cisco.com/c/en_in/solutions/enterprise-networks/sd-wan/index.html) Syslog and Netflow data into Microsoft Sentinel.","[{""description"": ""**To ingest Cisco SD-WAN Syslog and Netflow data into Microsoft Sentinel follow the steps below.**""}, {""title"": ""1. Steps to ingest Syslog data to Microsoft sentinel"", ""description"": ""Azure Monitor Agent will be used to collect the syslog data into Microsoft sentinel. For that first need to create an azure arc server for the VM from which syslog data will be sent.\n""}, {""title"": ""1.1 Steps to Add Azure Arc Server"", ""description"": ""1. In Azure portal, go to Servers - Azure Arc and click on Add.\n2. 
Select Generate Script under Add a single server section. A User can also generate scripts for Multiple Servers as well.\n3. Review the information on the Prerequisites page, then select Next.\n4. On the Resource details page, provide the subscription and resource group of the Microsoft Sentinel, Region, Operating system and Connectivity method. Then select Next.\n5. On the Tags page, review the default Physical location tags suggested and enter a value, or specify one or more Custom tags to support your standards. Then select Next\n6. Select Download to save the script file. \n7. Now that you have generated the script, the next step is to run it on the server that you want to onboard to Azure Arc. \n8. If you have Azure VM follow the steps mentioned in the [link](https://learn.microsoft.com/azure/azure-arc/servers/plan-evaluate-on-azure-virtual-machine) before running the script. \n9. Run the script by the following command: `./.sh`\n10. After you install the agent and configure it to connect to Azure Arc-enabled servers, go to the Azure portal to verify that the server has successfully connected. View your machine in the Azure portal.\n> **Reference link:** [https://learn.microsoft.com/azure/azure-arc/servers/learn/quick-enable-hybrid-vm](https://learn.microsoft.com/azure/azure-arc/servers/learn/quick-enable-hybrid-vm)""}, {""title"": ""1.2 Steps to Create Data Collection Rule (DCR)"", ""description"": ""1. In Azure Portal search for Monitor. Under Settings, select Data Collection Rules and Select Create.\n2. On the Basics panel, enter the Rule Name, Subscription, Resource group, Region and Platform Type.\n3. Select Next: Resources.\n4. Select Add resources.Use the filters to find the virtual machine that you'll use to collect logs.\n5. Select the virtual machine. Select Apply.\n6. Select Next: Collect and deliver.\n7. Select Add data source. For Data source type, select Linux syslog. \n8. For Minimum log level, leave the default values LOG_DEBUG.\n9. 
Select Next: Destination.\n10. Select Add destination and add Destination type, Subscription and Account or namespace.\n11. Select Add data source. Select Next: Review + create.\n12. Select Create. Wait for 20 minutes. In Microsoft Sentinel or Azure Monitor, verify that the Azure Monitor agent is running on your VM.\n> **Reference link:** [https://learn.microsoft.com/azure/sentinel/forward-syslog-monitor-agent](https://learn.microsoft.com/azure/sentinel/forward-syslog-monitor-agent)""}, {""title"": ""2. Steps to ingest Netflow data to Microsoft sentinel"", ""description"": ""To Ingest Netflow data into Microsoft sentinel, Filebeat and Logstash needs to be installed and configured on the VM. After the configuration, vm will be able to receive netflow data on the configured port and that data will be ingested into the workspace of Microsoft sentinel.\n""}, {""title"": ""2.1 Install filebeat and logstash"", ""description"": ""1. For the installation of filebeat and logstash using apt refer to this doc: \n 1. Filebeat: [https://www.elastic.co/guide/en/beats/filebeat/current/setup-repositories.html](https://www.elastic.co/guide/en/beats/filebeat/current/setup-repositories.html). \n 2. Logstash: [https://www.elastic.co/guide/en/logstash/current/installing-logstash.html](https://www.elastic.co/guide/en/logstash/current/installing-logstash.html). \n2. For the installation of filebeat and logstash for RedHat based Linux (yum) steps are as follows: \n 1. Filebeat: [https://www.elastic.co/guide/en/beats/filebeat/current/setup-repositories.html#_yum](https://www.elastic.co/guide/en/beats/filebeat/current/setup-repositories.html#_yum). \n 2. Logstash: [https://www.elastic.co/guide/en/logstash/current/installing-logstash.html#_yum](https://www.elastic.co/guide/en/logstash/current/installing-logstash.html#_yum)""}, {""title"": ""2.2 Configure Filebeat to send events to Logstash"", ""description"": ""1. Edit filebeat.yml file: `vi /etc/filebeat/filebeat.yml` \n2. 
Comment out the Elasticsearch Output section. \n3. Uncomment Logstash Output section (Uncomment out only these two lines)-\n\t\toutput.logstash\n\t\thosts: [\""localhost:5044\""] \n4. In the Logstash Output section, if you want to send the data other than the default port i.e. 5044 port, then replace the port number in the hosts field. (Note: This port should be added in the conf file, while configuring logstash.) \n5. In the 'filebeat.inputs' section comment out existing configuration and add the following configuration: \n\t\t- type: netflow\n\t\t max_message_size: 10KiB\n\t\t host: \""0.0.0.0:2055\""\n\t\t protocols: [ v5, v9, ipfix ]\n\t\t expiration_timeout: 30m\n\t\t queue_size: 8192\n\t\t custom_definitions:\n\t\t - /etc/filebeat/custom.yml\n\t\t detect_sequence_reset: true\n\t\t enabled: true \n6. In the Filebeat inputs section, if you want to receive the data other than the default port i.e. 2055 port, then replace the port number in the host field. \n7. Add the provided [custom.yml](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/Cisco%20SD-WAN/Data%20Connectors/custom.yml) file inside the /etc/filebeat/ directory. \n8. Open the filebeat input and output port in the firewall. \n 1. Run command: `firewall-cmd --zone=public --permanent --add-port=2055/udp` \n 2. Run command: `firewall-cmd --zone=public --permanent --add-port=5044/udp` \n> Note: if a custom port is added for filebeat input/output, then open that port in the firewall.""}, {""title"": ""2.3 Configure Logstash to send events to Microsoft Sentinel"", ""description"": ""1. Install the Azure Log Analytics plugin: \n 1. Run Command: `sudo /usr/share/logstash/bin/logstash-plugin install microsoft-logstash-output-azure-loganalytics` \n2. Store the Log Analytics workspace key in the Logstash key store. The workspace key can be found in Azure Portal under Log analytic workspace > Select workspace > Under Settings select Agent > Log Analytics agent instructions. \n3. 
Copy the Primary key and run the following commands: \n 1. `sudo /usr/share/logstash/bin/logstash-keystore --path.settings /etc/logstash create LogAnalyticsKey` \n 2. `sudo /usr/share/logstash/bin/logstash-keystore --path.settings /etc/logstash add LogAnalyticsKey` \n4. Create the configuration file /etc/logstash/cisco-netflow-to-sentinel.conf: \n\t\tinput {\n\t\t beats {\n\t\t port => #(Enter output port number which has been configured during filebeat configuration i.e. filebeat.yml file .)\n\t\t }\n\t\t}\n\t\toutput {\n\t\t microsoft-logstash-output-azure-loganalytics {\n\t\t workspace_id => \""\""\n\t\t workspace_key => \""${LogAnalyticsKey}\""\n\t\t custom_log_table_name => \""CiscoSDWANNetflow\""\n\t\t }\n\t\t} \n> Note: If table is not present in Microsoft sentinel, then it will create a new table in sentinel.""}, {""title"": ""2.4 Run Filebeat:"", ""description"": ""1. Open a terminal and run the command: \n> `systemctl start filebeat` \n2. This command will start running filebeat in the background. To see the logs stop the filebeat (`systemctl stop filebeat`) then run the following command: \n> `filebeat run -e`""}, {""title"": ""2.5 Run Logstash:"", ""description"": ""1. In another terminal run the command: \n> `/usr/share/logstash/bin/logstash --path.settings /etc/logstash -f /etc/logstash/cisco-netflow-to-sentinel.conf &` \n2. This command will start running the logstash in the background. 
To see the logs of logstash kill the above process and run the following command : \n> `/usr/share/logstash/bin/logstash --path.settings /etc/logstash -f /etc/logstash/cisco-netflow-to-sentinel.conf`""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20SD-WAN/Data%20Connectors/CiscoSDWAN.json","true" -"","Cisco Secure Cloud Analytics","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Cloud%20Analytics","azuresentinel","azure-sentinel-solution-ciscostealthwatch","2021-10-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"Syslog","Cisco Secure Cloud Analytics","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Cloud%20Analytics","azuresentinel","azure-sentinel-solution-ciscostealthwatch","2021-10-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Stealthwatch","Cisco","[Deprecated] Cisco Secure Cloud Analytics","The [Cisco Secure Cloud Analytics](https://www.cisco.com/c/en/us/products/security/stealthwatch/index.html) data connector provides the capability to ingest [Cisco Secure Cloud Analytics events](https://www.cisco.com/c/dam/en/us/td/docs/security/stealthwatch/management_console/securit_events_alarm_categories/7_4_2_Security_Events_and_Alarm_Categories_DV_2_1.pdf) into Microsoft Sentinel. 
Refer to [Cisco Secure Cloud Analytics documentation](https://www.cisco.com/c/dam/en/us/td/docs/security/stealthwatch/system_installation_configuration/7_5_0_System_Configuration_Guide_DV_1_3.pdf) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**StealthwatchEvent**](https://aka.ms/sentinel-stealthwatch-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using Cisco Secure Cloud Analytics version 7.3.2"", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server where the Cisco Secure Cloud Analytics logs are forwarded.\n\n> Logs from Cisco Secure Cloud Analytics Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": 
[{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure Cisco Secure Cloud Analytics event forwarding"", ""description"": ""Follow the configuration steps below to get Cisco Secure Cloud Analytics logs into Microsoft Sentinel.\n1. Log in to the Stealthwatch Management Console (SMC) as an administrator.\n2. In the menu bar, click **Configuration** **>** **Response Management**.\n3. From the **Actions** section in the **Response Management** menu, click **Add > Syslog Message**.\n4. In the Add Syslog Message Action window, configure parameters.\n5. Enter the following custom format:\n|Lancope|Stealthwatch|7.3|{alarm_type_id}|0x7C|src={source_ip}|dst={target_ip}|dstPort={port}|proto={protocol}|msg={alarm_type_description}|fullmessage={details}|start={start_active_time}|end={end_active_time}|cat={alarm_category_name}|alarmID={alarm_id}|sourceHG={source_host_group_names}|targetHG={target_host_group_names}|sourceHostSnapshot={source_url}|targetHostSnapshot={target_url}|flowCollectorName={device_name}|flowCollectorIP={device_ip}|domain={domain_name}|exporterName={exporter_hostname}|exporterIPAddress={exporter_ip}|exporterInfo={exporter_label}|targetUser={target_username}|targetHostname={target_hostname}|sourceUser={source_username}|alarmStatus={alarm_status}|alarmSev={alarm_severity_name}\n\n6. Select the custom format from the list and click **OK**\n7. Click **Response Management > Rules**.\n8. Click **Add** and select **Host Alarm**.\n9. Provide a rule name in the **Name** field.\n10. Create rules by selecting values from the Type and Options menus. To add more rules, click the ellipsis icon. 
For a Host Alarm, combine as many possible types in a statement as possible.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Cloud%20Analytics/Data%20Connectors/Cisco_Stealthwatch_syslog.json","true" -"","Cisco Secure Endpoint","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Endpoint","azuresentinel","azure-sentinel-solution-ciscosecureendpoint","2021-10-28","2022-02-02","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"CiscoSecureEndpoint_CL","Cisco Secure Endpoint","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Endpoint","azuresentinel","azure-sentinel-solution-ciscosecureendpoint","2021-10-28","2022-02-02","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoSecureEndpoint","Cisco","[DEPRECATED] Cisco Secure Endpoint (AMP)","The Cisco Secure Endpoint (formerly AMP for Endpoints) data connector provides the capability to ingest Cisco Secure Endpoint [audit logs](https://api-docs.amp.cisco.com/api_resources/AuditLog?api_host=api.amp.cisco.com&api_version=v1) and [events](https://api-docs.amp.cisco.com/api_actions/details?api_action=GET+%2Fv1%2Fevents&api_host=api.amp.cisco.com&api_resource=Event&api_version=v1) into Microsoft Sentinel.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Cisco Secure Endpoint API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**CiscoSecureEndpoint**](https://aka.ms/sentinel-ciscosecureendpoint-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Obtaining Cisco Secure Endpoint API credentials**\n\n1. 
Follow the instructions in the [documentation](https://api-docs.amp.cisco.com/api_resources?api_host=api.amp.cisco.com&api_version=v1) to generate Client ID and API Key.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Azure Blob Storage connection string and container name, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ciscosecureendpoint-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Cisco Secure Endpoint Api Host**, **Cisco Secure Endpoint Client Id**, **Cisco Secure Endpoint Api Key**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key**\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. 
Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-ciscosecureendpoint-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions.\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tCISCO_SE_API_API_HOST\n\t\tCISCO_SE_API_CLIENT_ID\n\t\tCISCO_SE_API_KEY\n\t\tWORKSPACE_ID\n\t\tSHARED_KEY\n\t\tlogAnalyticsUri (Optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. \n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Cisco Secure Endpoint API credentials"", ""description"": ""Cisco Secure Endpoint Client ID and API Key are required. [See the documentation to learn more about Cisco Secure Endpoint API](https://api-docs.amp.cisco.com/api_resources?api_host=api.amp.cisco.com&api_version=v1). 
[API domain](https://api-docs.amp.cisco.com) must be provided as well.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Endpoint/Data%20Connectors/CiscoSecureEndpoint_API_FunctionApp.json","true" -"CiscoSecureEndpointAuditLogsV2_CL","Cisco Secure Endpoint","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Endpoint","azuresentinel","azure-sentinel-solution-ciscosecureendpoint","2021-10-28","2022-02-02","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoSecureEndpointLogsCCPDefinition","Microsoft","Cisco Secure Endpoint (via Codeless Connector Framework)","The Cisco Secure Endpoint (formerly AMP for Endpoints) data connector provides the capability to ingest Cisco Secure Endpoint [audit logs](https://developer.cisco.com/docs/secure-endpoint/auditlog/) and [events](https://developer.cisco.com/docs/secure-endpoint/v1-api-reference-event/) into Microsoft Sentinel.","[{""description"": ""To ingest data from Cisco Secure Endpoint to Microsoft Sentinel, you have to click on Add Account button below, then you get a pop up to fill the details like Email, Organization, Client ID, API Key and Region, provide the required information and click on Connect. 
You can see the connected organizations/emails in the below grid.\n>"", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Organization"", ""columnValue"": ""properties.addOnAttributes.Organization""}, {""columnName"": ""Email"", ""columnValue"": ""properties.addOnAttributes.Email""}, {""columnName"": ""Endpoint"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Cisco Secure Endpoint Email"", ""placeholder"": ""Enter your Cisco Email"", ""type"": ""text"", ""name"": ""email"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Cisco Secure Endpoint Organization"", ""placeholder"": ""Enter the name of your Organization"", ""type"": ""text"", ""name"": ""organization"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Cisco Secure Endpoint Client ID"", ""placeholder"": ""Enter your Client ID"", ""type"": ""text"", ""name"": ""username"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Cisco Secure Endpoint API Key"", ""placeholder"": ""Enter your API Key"", ""type"": ""password"", ""name"": ""apiKey"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Cisco Secure Endpoint Region"", ""placeholder"": ""Enter the region you want to connect"", ""type"": ""text"", ""name"": ""region"", ""required"": true, ""description"": ""For example, if your region is https://api.apjc.amp.cisco.com then enter only apjc.amp in the above field. 
Follow the link provided in the Cisco Secure Endpoint API Credentials/Regions section for better understanding of the regions.""}}]}]}}], ""title"": ""Connect Cisco Secure Endpoint to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Cisco Secure Endpoint API Credentials/Regions"", ""description"": ""To create API Credentials and to understand the regions, follow the document link provided here. [Click here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Endpoint/Data%20Connectors/README.md).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Endpoint/Data%20Connectors/CiscoSecureEndpointLogs_ccp/CiscoSecureEndpointLogs_ConnectorDefinition.json","true" -"CiscoSecureEndpointEventsV2_CL","Cisco Secure Endpoint","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Endpoint","azuresentinel","azure-sentinel-solution-ciscosecureendpoint","2021-10-28","2022-02-02","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoSecureEndpointLogsCCPDefinition","Microsoft","Cisco Secure Endpoint (via Codeless Connector Framework)","The Cisco Secure Endpoint (formerly AMP for Endpoints) data connector provides the capability to ingest Cisco Secure Endpoint [audit logs](https://developer.cisco.com/docs/secure-endpoint/auditlog/) and [events](https://developer.cisco.com/docs/secure-endpoint/v1-api-reference-event/) into Microsoft Sentinel.","[{""description"": ""To ingest data from Cisco Secure Endpoint to Microsoft Sentinel, you have to click on Add Account button below, then you get a pop up to fill the details like Email, Organization, Client ID, 
API Key and Region, provide the required information and click on Connect. You can see the connected organizations/emails in the below grid.\n>"", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Organization"", ""columnValue"": ""properties.addOnAttributes.Organization""}, {""columnName"": ""Email"", ""columnValue"": ""properties.addOnAttributes.Email""}, {""columnName"": ""Endpoint"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Cisco Secure Endpoint Email"", ""placeholder"": ""Enter your Cisco Email"", ""type"": ""text"", ""name"": ""email"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Cisco Secure Endpoint Organization"", ""placeholder"": ""Enter the name of your Organization"", ""type"": ""text"", ""name"": ""organization"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Cisco Secure Endpoint Client ID"", ""placeholder"": ""Enter your Client ID"", ""type"": ""text"", ""name"": ""username"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Cisco Secure Endpoint API Key"", ""placeholder"": ""Enter your API Key"", ""type"": ""password"", ""name"": ""apiKey"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Cisco Secure Endpoint Region"", ""placeholder"": ""Enter the region you want to connect"", ""type"": ""text"", ""name"": ""region"", ""required"": true, ""description"": ""For example, if your region is https://api.apjc.amp.cisco.com then enter only apjc.amp in the above field. 
Follow the link provided in the Cisco Secure Endpoint API Credentials/Regions section for better understanding of the regions.""}}]}]}}], ""title"": ""Connect Cisco Secure Endpoint to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Cisco Secure Endpoint API Credentials/Regions"", ""description"": ""To create API Credentials and to understand the regions, follow the document link provided here. [Click here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Endpoint/Data%20Connectors/README.md).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Endpoint/Data%20Connectors/CiscoSecureEndpointLogs_ccp/CiscoSecureEndpointLogs_ConnectorDefinition.json","true" -"","Cisco UCS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20UCS","azuresentinel","azure-sentinel-solution-ciscoucs","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"Syslog","Cisco UCS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20UCS","azuresentinel","azure-sentinel-solution-ciscoucs","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoUCS","Cisco","[Deprecated] Cisco UCS","The [Cisco Unified Computing System (UCS)](https://www.cisco.com/c/en/us/products/servers-unified-computing/index.html) connector allows you to easily connect your Cisco UCS logs with Microsoft Sentinel This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": """", ""description"": ""**NOTE:** This data 
connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias CiscoUCS and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20UCS/Parsers/CiscoUCS.yaml). The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n 2. Select **Apply below configuration to my machines** and select the facilities and severities.\n 3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. 
Configure and connect the Cisco UCS"", ""description"": ""[Follow these instructions](https://www.cisco.com/c/en/us/support/docs/servers-unified-computing/ucs-manager/110265-setup-syslog-for-ucs.html#configsremotesyslog) to configure the Cisco UCS to forward syslog. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Cisco UCS"", ""description"": ""must be configured to export logs via Syslog""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20UCS/Data%20Connectors/Connector_Syslog_CiscoUCS.json","true" -"","CiscoASA","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoASA","azuresentinel","azure-sentinel-solution-ciscoasa","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"CommonSecurityLog","CiscoASA","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoASA","azuresentinel","azure-sentinel-solution-ciscoasa","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoASA","Cisco","Cisco ASA via Legacy Agent","The Cisco ASA firewall connector allows you to easily connect your Cisco ASA logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python --version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Cisco ASA logs to Syslog agent"", ""description"": ""Configure Cisco ASA to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent.\n\nGo to\u00a0[Send Syslog messages to an external Syslog server](https://aka.ms/asi-syslog-cisco-forwarding), and follow the instructions to set up the connection. Use these parameters when prompted:\n\n1. Set \""port\"" to 514.\n2. 
Set \""syslog_ip\"" to the IP address of the Syslog agent.\n\n\n[Learn more >](https://aka.ms/CEFCisco)""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python --version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoASA/Data%20Connectors/CiscoASA.JSON","true" -"CommonSecurityLog","CiscoASA","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoASA","azuresentinel","azure-sentinel-solution-ciscoasa","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoAsaAma","Microsoft","Cisco ASA/FTD via AMA","The Cisco ASA firewall connector allows you to easily connect your Cisco ASA logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": ""Enable data collection rule\u200b"", ""description"": ""> Cisco ASA/FTD event logs are collected only from **Linux** agents."", ""instructions"": [{""type"": ""CiscoAsaAma""}]}, {""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 4}, ""type"": ""InstallAgent""}]}, {""title"": ""Run the following command to install and apply the Cisco ASA/FTD collector:"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces/datasources"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace data sources"", ""scope"": 
""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoASA/Data%20Connectors/template_CiscoAsaAma.JSON","true" -"","CiscoDuoSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoDuoSecurity","cisco","duo-security-sentinel","2022-01-07","","","Cisco Systems","Partner","https://duo.com/support","","domains","","","","","","","false","","false" -"CiscoDuo_CL","CiscoDuoSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoDuoSecurity","cisco","duo-security-sentinel","2022-01-07","","","Cisco Systems","Partner","https://duo.com/support","","domains","CiscoDuoSecurity","Cisco","Cisco Duo Security","The Cisco Duo Security data connector provides the capability to ingest [authentication logs](https://duo.com/docs/adminapi#authentication-logs), [administrator logs](https://duo.com/docs/adminapi#administrator-logs), [telephony logs](https://duo.com/docs/adminapi#telephony-logs), [offline enrollment logs](https://duo.com/docs/adminapi#offline-enrollment-logs) and [Trust Monitor events](https://duo.com/docs/adminapi#trust-monitor) into Microsoft Sentinel using the Cisco Duo Admin API. Refer to [API documentation](https://duo.com/docs/adminapi) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Cisco Duo API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**CiscoDuo**](https://aka.ms/sentinel-CiscoDuoSecurity-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Obtaining Cisco Duo Admin API credentials**\n\n1. Follow [the instructions](https://duo.com/docs/adminapi#first-steps) to obtain **integration key**, **secret key**, and **API hostname**. Use **Grant read log** permission in the 4th step of [the instructions](https://duo.com/docs/adminapi#first-steps).""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Azure Blob Storage connection string and container name, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CiscoDuoSecurity-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CiscoDuoSecurity-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Cisco Duo Integration Key**, **Cisco Duo Secret Key**, **Cisco Duo API Hostname**, **Cisco Duo Log Types**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key**\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentinel-CiscoDuoSecurity-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tCISCO_DUO_INTEGRATION_KEY\n\t\tCISCO_DUO_SECRET_KEY\n\t\tCISCO_DUO_API_HOSTNAME\n\t\tCISCO_DUO_LOG_TYPES\n\t\tWORKSPACE_ID\n\t\tSHARED_KEY\n\t\tlogAnalyticsUri (Optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. \n4. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Cisco Duo API credentials"", ""description"": ""Cisco Duo API credentials with permission *Grant read log* is required for Cisco Duo API. 
See the [documentation](https://duo.com/docs/adminapi#first-steps) to learn more about creating Cisco Duo API credentials.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoDuoSecurity/Data%20Connectors/CiscoDuo_API_FunctionApp.json","true" -"","CiscoMeraki","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoMeraki","azuresentinel","azure-sentinel-solution-ciscomeraki","2021-09-08","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"CiscoMerakiNativePoller_CL","CiscoMeraki","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoMeraki","azuresentinel","azure-sentinel-solution-ciscomeraki","2021-09-08","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoMeraki","Cisco","[Deprecated] Cisco Meraki","The [Cisco Meraki](https://meraki.cisco.com/) connector allows you to easily connect your Cisco Meraki (MX/MR/MS) logs with Microsoft Sentinel. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias CiscoMeraki and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoMeraki/Parsers/CiscoMeraki.txt). The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Follow the configuration steps below to get Cisco Meraki device logs into Microsoft Sentinel. Refer to the [Azure Monitor Documentation](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-json) for more details on these steps.\n For Cisco Meraki logs, we have issues while parsing the data by OMS agent data using default settings. \nSo we advice to capture the logs into custom table **meraki_CL** using below instructions. \n1. Login to the server where you have installed OMS agent.\n2. Download config file [meraki.conf](https://aka.ms/sentinel-ciscomerakioms-conf) \n\t\twget -v https://aka.ms/sentinel-ciscomerakioms-conf -O meraki.conf \n3. Copy meraki.conf to the /etc/opt/microsoft/omsagent/**workspace_id**/conf/omsagent.d/ folder. \n\t\tcp meraki.conf /etc/opt/microsoft/omsagent/<>/conf/omsagent.d/\n4. Edit meraki.conf as follows:\n\n\t a. meraki.conf uses the port **22033** by default. 
Ensure this port is not being used by any other source on your server\n\n\t b. If you would like to change the default port for **meraki.conf** make sure that you dont use default Azure monitoring /log analytic agent ports I.e.(For example CEF uses TCP port **25226** or **25224**) \n\n\t c. replace **workspace_id** with real value of your Workspace ID (lines 14,15,16,19)\n5. Save changes and restart the Azure Log Analytics agent for Linux service with the following command:\n\t\tsudo /opt/microsoft/omsagent/bin/service_control restart\n6. Modify /etc/rsyslog.conf file - add below template preferably at the beginning / before directives section \n\t\t$template meraki,\""%timestamp% %hostname% %msg%\\n\"" \n7. Create a custom conf file in /etc/rsyslog.d/ for example 10-meraki.conf and add following filter conditions.\n\n\t With an added statement you will need to create a filter which will specify the logs coming from the Cisco Meraki to be forwarded to the custom table.\n\n\t reference: [Filter Conditions \u2014 rsyslog 8.18.0.master documentation](https://rsyslog.readthedocs.io/en/latest/configuration/filters.html)\n\n\t Here is an example of filtering that can be defined, this is not complete and will require additional testing for each installation.\n\t\t if $rawmsg contains \""flows\"" then @@127.0.0.1:22033;meraki\n\t\t & stop\n\t\t if $rawmsg contains \""firewall\"" then @@127.0.0.1:22033;meraki\n\t\t & stop\n\t\t if $rawmsg contains \""urls\"" then @@127.0.0.1:22033;meraki\n\t\t & stop\n\t\t if $rawmsg contains \""ids-alerts\"" then @@127.0.0.1:22033;meraki\n\t\t & stop\n\t\t if $rawmsg contains \""events\"" then @@127.0.0.1:22033;meraki\n\t\t & stop\n\t\t if $rawmsg contains \""ip_flow_start\"" then @@127.0.0.1:22033;meraki\n\t\t & stop\n\t\t if $rawmsg contains \""ip_flow_end\"" then @@127.0.0.1:22033;meraki\n\t\t & stop \n8. 
Restart rsyslog\n\t\t systemctl restart rsyslog"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Configure and connect the Cisco Meraki device(s)"", ""description"": ""[Follow these instructions](https://documentation.meraki.com/General_Administration/Monitoring_and_Reporting/Meraki_Device_Reporting_-_Syslog%2C_SNMP_and_API) to configure the Cisco Meraki device(s) to forward syslog. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Cisco Meraki"", ""description"": ""must be configured to export logs via Syslog""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoMeraki/Data%20Connectors/Connector_Syslog_CiscoMeraki.json","true" -"meraki_CL","CiscoMeraki","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoMeraki","azuresentinel","azure-sentinel-solution-ciscomeraki","2021-09-08","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoMeraki","Cisco","[Deprecated] Cisco Meraki","The [Cisco Meraki](https://meraki.cisco.com/) connector allows you to easily connect your Cisco Meraki (MX/MR/MS) logs with Microsoft Sentinel. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias CiscoMeraki and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoMeraki/Parsers/CiscoMeraki.txt). The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Follow the configuration steps below to get Cisco Meraki device logs into Microsoft Sentinel. Refer to the [Azure Monitor Documentation](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-json) for more details on these steps.\n For Cisco Meraki logs, we have issues while parsing the data by OMS agent data using default settings. \nSo we advice to capture the logs into custom table **meraki_CL** using below instructions. \n1. Login to the server where you have installed OMS agent.\n2. 
Download config file [meraki.conf](https://aka.ms/sentinel-ciscomerakioms-conf) \n\t\twget -v https://aka.ms/sentinel-ciscomerakioms-conf -O meraki.conf \n3. Copy meraki.conf to the /etc/opt/microsoft/omsagent/**workspace_id**/conf/omsagent.d/ folder. \n\t\tcp meraki.conf /etc/opt/microsoft/omsagent/<>/conf/omsagent.d/\n4. Edit meraki.conf as follows:\n\n\t a. meraki.conf uses the port **22033** by default. Ensure this port is not being used by any other source on your server\n\n\t b. If you would like to change the default port for **meraki.conf** make sure that you dont use default Azure monitoring /log analytic agent ports I.e.(For example CEF uses TCP port **25226** or **25224**) \n\n\t c. replace **workspace_id** with real value of your Workspace ID (lines 14,15,16,19)\n5. Save changes and restart the Azure Log Analytics agent for Linux service with the following command:\n\t\tsudo /opt/microsoft/omsagent/bin/service_control restart\n6. Modify /etc/rsyslog.conf file - add below template preferably at the beginning / before directives section \n\t\t$template meraki,\""%timestamp% %hostname% %msg%\\n\"" \n7. 
Create a custom conf file in /etc/rsyslog.d/ for example 10-meraki.conf and add following filter conditions.\n\n\t With an added statement you will need to create a filter which will specify the logs coming from the Cisco Meraki to be forwarded to the custom table.\n\n\t reference: [Filter Conditions \u2014 rsyslog 8.18.0.master documentation](https://rsyslog.readthedocs.io/en/latest/configuration/filters.html)\n\n\t Here is an example of filtering that can be defined, this is not complete and will require additional testing for each installation.\n\t\t if $rawmsg contains \""flows\"" then @@127.0.0.1:22033;meraki\n\t\t & stop\n\t\t if $rawmsg contains \""firewall\"" then @@127.0.0.1:22033;meraki\n\t\t & stop\n\t\t if $rawmsg contains \""urls\"" then @@127.0.0.1:22033;meraki\n\t\t & stop\n\t\t if $rawmsg contains \""ids-alerts\"" then @@127.0.0.1:22033;meraki\n\t\t & stop\n\t\t if $rawmsg contains \""events\"" then @@127.0.0.1:22033;meraki\n\t\t & stop\n\t\t if $rawmsg contains \""ip_flow_start\"" then @@127.0.0.1:22033;meraki\n\t\t & stop\n\t\t if $rawmsg contains \""ip_flow_end\"" then @@127.0.0.1:22033;meraki\n\t\t & stop \n8. Restart rsyslog\n\t\t systemctl restart rsyslog"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Configure and connect the Cisco Meraki device(s)"", ""description"": ""[Follow these instructions](https://documentation.meraki.com/General_Administration/Monitoring_and_Reporting/Meraki_Device_Reporting_-_Syslog%2C_SNMP_and_API) to configure the Cisco Meraki device(s) to forward syslog. 
Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Cisco Meraki"", ""description"": ""must be configured to export logs via Syslog""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoMeraki/Data%20Connectors/Connector_Syslog_CiscoMeraki.json","true" -"CiscoMerakiNativePoller_CL","CiscoMeraki","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoMeraki","azuresentinel","azure-sentinel-solution-ciscomeraki","2021-09-08","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoMeraki(usingRESTAPI)","Microsoft","Cisco Meraki (using REST API)","The [Cisco Meraki](https://aka.ms/ciscomeraki) connector allows you to easily connect your Cisco Meraki MX [security events](https://aka.ms/ciscomerakisecurityevents) to Microsoft Sentinel. The data connector uses [Cisco Meraki REST API](https://developer.cisco.com/meraki/api-v1/#!get-organization-appliance-security-events) to fetch security events and supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security event data into a custom columns so that queries don't need to parse it again, thus resulting in better performance.

**Supported ASIM schema:**
1. Network Session","[{""title"": ""Connect Cisco Meraki Security Events to Microsoft Sentinel"", ""description"": ""To enable Cisco Meraki Security Events for Microsoft Sentinel, provide the required information below and click on Connect.\n>This data connector depends on a parser based on a Kusto Function to render the content. [**CiscoMeraki**](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/CiscoMeraki/Parsers/CiscoMeraki.txt) Parser currently support only \""**IDS Alert**\"" and \""**File Scanned**\"" Events."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Organization Id"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{organization}}""}], ""transformation"": {""transformationType"": ""predefinedTransformation"", ""outputStream"": ""[concat('Custom-', variables('streamName'))]"", ""dataCollectionRuleTemplateSpecName"": ""[variables('dataCollectionRuleId')]"", ""logAnalyticsTableTemplateSpecName"": ""[variables('logAnalyticsTableId')]""}}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Cisco Meraki REST API Key"", ""description"": ""Enable API access in Cisco Meraki and generate API Key. 
Please refer to Cisco Meraki official [documentation](https://aka.ms/ciscomerakiapikey) for more information.""}, {""name"": ""Cisco Meraki Organization Id"", ""description"": ""Obtain your Cisco Meraki organization id to fetch security events. Follow the steps in the [documentation](https://aka.ms/ciscomerakifindorg) to obtain the Organization Id using the Meraki API Key obtained in previous step.""}]}","true","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoMeraki/Data%20Connectors/CiscoMerakiNativePollerConnector/azuredeploy_Cisco_Meraki_native_poller_connector.json","true" -"meraki_CL","CiscoMeraki","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoMeraki","azuresentinel","azure-sentinel-solution-ciscomeraki","2021-09-08","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoMeraki(usingRESTAPI)","Microsoft","Cisco Meraki (using REST API)","The [Cisco Meraki](https://aka.ms/ciscomeraki) connector allows you to easily connect your Cisco Meraki MX [security events](https://aka.ms/ciscomerakisecurityevents) to Microsoft Sentinel. The data connector uses [Cisco Meraki REST API](https://developer.cisco.com/meraki/api-v1/#!get-organization-appliance-security-events) to fetch security events and supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security event data into a custom columns so that queries don't need to parse it again, thus resulting in better performance.

**Supported ASIM schema:**
1. Network Session","[{""title"": ""Connect Cisco Meraki Security Events to Microsoft Sentinel"", ""description"": ""To enable Cisco Meraki Security Events for Microsoft Sentinel, provide the required information below and click on Connect.\n>This data connector depends on a parser based on a Kusto Function to render the content. [**CiscoMeraki**](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/CiscoMeraki/Parsers/CiscoMeraki.txt) Parser currently support only \""**IDS Alert**\"" and \""**File Scanned**\"" Events."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Organization Id"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{organization}}""}], ""transformation"": {""transformationType"": ""predefinedTransformation"", ""outputStream"": ""[concat('Custom-', variables('streamName'))]"", ""dataCollectionRuleTemplateSpecName"": ""[variables('dataCollectionRuleId')]"", ""logAnalyticsTableTemplateSpecName"": ""[variables('logAnalyticsTableId')]""}}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Cisco Meraki REST API Key"", ""description"": ""Enable API access in Cisco Meraki and generate API Key. 
Please refer to Cisco Meraki official [documentation](https://aka.ms/ciscomerakiapikey) for more information.""}, {""name"": ""Cisco Meraki Organization Id"", ""description"": ""Obtain your Cisco Meraki organization id to fetch security events. Follow the steps in the [documentation](https://aka.ms/ciscomerakifindorg) to obtain the Organization Id using the Meraki API Key obtained in previous step.""}]}","true","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoMeraki/Data%20Connectors/CiscoMerakiNativePollerConnector/azuredeploy_Cisco_Meraki_native_poller_connector.json","true" -"CiscoMerakiNativePoller_CL","CiscoMeraki","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoMeraki","azuresentinel","azure-sentinel-solution-ciscomeraki","2021-09-08","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoMerakiNativePoller","Microsoft","Cisco Meraki (using REST API)","The [Cisco Meraki](https://aka.ms/ciscomeraki) connector allows you to easily connect your Cisco Meraki MX [security events](https://aka.ms/ciscomerakisecurityevents) to Microsoft Sentinel. The data connector uses [Cisco Meraki REST API](https://developer.cisco.com/meraki/api-v1/#!get-organization-appliance-security-events) to fetch security events and supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security event data into a custom columns so that queries don't need to parse it again, thus resulting in better performance.

**Supported ASIM schema:**
1. Network Session","[{""title"": ""Connect Cisco Meraki Security Events to Microsoft Sentinel"", ""description"": ""To enable Cisco Meraki Security Events for Microsoft Sentinel, provide the required information below and click on Connect.\n>This data connector depends on a parser based on a Kusto Function to render the content. [**CiscoMeraki**](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/CiscoMeraki/Parsers/CiscoMeraki.txt) Parser currently support only \""**IDS Alert**\"" and \""**File Scanned**\"" Events."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Organization Id"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{organization}}""}], ""transformation"": {""transformationType"": ""predefinedTransformation"", ""outputStream"": ""[concat('Custom-', variables('streamName'))]"", ""dataCollectionRuleTemplateSpecName"": ""[variables('dataCollectionRuleId')]"", ""logAnalyticsTableTemplateSpecName"": ""[variables('logAnalyticsTableId')]""}}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Cisco Meraki REST API Key"", ""description"": ""Enable API access in Cisco Meraki and generate API Key. 
Please refer to Cisco Meraki official [documentation](https://aka.ms/ciscomerakiapikey) for more information.""}, {""name"": ""Cisco Meraki Organization Id"", ""description"": ""Obtain your Cisco Meraki organization id to fetch security events. Follow the steps in the [documentation](https://aka.ms/ciscomerakifindorg) to obtain the Organization Id using the Meraki API Key obtained in previous step.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoMeraki/Data%20Connectors/CiscoMerakiNativePollerConnector/azuredeploy_Cisco_Meraki_native_poller_connector.json","true" -"meraki_CL","CiscoMeraki","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoMeraki","azuresentinel","azure-sentinel-solution-ciscomeraki","2021-09-08","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoMerakiNativePoller","Microsoft","Cisco Meraki (using REST API)","The [Cisco Meraki](https://aka.ms/ciscomeraki) connector allows you to easily connect your Cisco Meraki MX [security events](https://aka.ms/ciscomerakisecurityevents) to Microsoft Sentinel. The data connector uses [Cisco Meraki REST API](https://developer.cisco.com/meraki/api-v1/#!get-organization-appliance-security-events) to fetch security events and supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security event data into a custom columns so that queries don't need to parse it again, thus resulting in better performance.

**Supported ASIM schema:**
1. Network Session","[{""title"": ""Connect Cisco Meraki Security Events to Microsoft Sentinel"", ""description"": ""To enable Cisco Meraki Security Events for Microsoft Sentinel, provide the required information below and click on Connect.\n>This data connector depends on a parser based on a Kusto Function to render the content. [**CiscoMeraki**](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/CiscoMeraki/Parsers/CiscoMeraki.txt) Parser currently support only \""**IDS Alert**\"" and \""**File Scanned**\"" Events."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Organization Id"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{organization}}""}], ""transformation"": {""transformationType"": ""predefinedTransformation"", ""outputStream"": ""[concat('Custom-', variables('streamName'))]"", ""dataCollectionRuleTemplateSpecName"": ""[variables('dataCollectionRuleId')]"", ""logAnalyticsTableTemplateSpecName"": ""[variables('logAnalyticsTableId')]""}}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Cisco Meraki REST API Key"", ""description"": ""Enable API access in Cisco Meraki and generate API Key. 
Please refer to Cisco Meraki official [documentation](https://aka.ms/ciscomerakiapikey) for more information.""}, {""name"": ""Cisco Meraki Organization Id"", ""description"": ""Obtain your Cisco Meraki organization id to fetch security events. Follow the steps in the [documentation](https://aka.ms/ciscomerakifindorg) to obtain the Organization Id using the Meraki API Key obtained in previous step.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoMeraki/Data%20Connectors/CiscoMerakiNativePollerConnector/azuredeploy_Cisco_Meraki_native_poller_connector.json","true" -"","CiscoSEG","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoSEG","azuresentinel","azure-sentinel-solution-ciscoseg","2021-06-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"CommonSecurityLog","CiscoSEG","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoSEG","azuresentinel","azure-sentinel-solution-ciscoseg","2021-06-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoSEG","Cisco","[Deprecated] Cisco Secure Email Gateway via Legacy Agent","The [Cisco Secure Email Gateway (SEG)](https://www.cisco.com/c/en/us/products/security/email-security/index.html) data connector provides the capability to ingest [Cisco SEG Consolidated Event Logs](https://www.cisco.com/c/en/us/td/docs/security/esa/esa14-0/user_guide/b_ESA_Admin_Guide_14-0/b_ESA_Admin_Guide_12_1_chapter_0100111.html#con_1061902) into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**CiscoSEGEvent**](https://aka.ms/sentinel-CiscoSEG-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using AsyncOS 14.0 for Cisco Secure Email 
Gateway"", ""instructions"": []}, {""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Follow these steps to configure Cisco Secure Email Gateway to forward logs via syslog:\n\n2.1. Configure [Log Subscription](https://www.cisco.com/c/en/us/td/docs/security/esa/esa14-0/user_guide/b_ESA_Admin_Guide_14-0/b_ESA_Admin_Guide_12_1_chapter_0100111.html#con_1134718)\n\n>**NOTE:** Select **Consolidated Event Logs** in Log Type field.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoSEG/Data%20Connectors/Connector_Cisco_SEG_CEF.json","true" -"CommonSecurityLog","CiscoSEG","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoSEG","azuresentinel","azure-sentinel-solution-ciscoseg","2021-06-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoSEGAma","Cisco","[Deprecated] Cisco Secure Email Gateway via AMA","The [Cisco Secure Email Gateway (SEG)](https://www.cisco.com/c/en/us/products/security/email-security/index.html) data connector provides the capability to ingest [Cisco SEG Consolidated Event Logs](https://www.cisco.com/c/en/us/td/docs/security/esa/esa14-0/user_guide/b_ESA_Admin_Guide_14-0/b_ESA_Admin_Guide_12_1_chapter_0100111.html#con_1061902) into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**CiscoSEGEvent**](https://aka.ms/sentinel-CiscoSEG-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Follow these steps to configure Cisco Secure Email Gateway to forward logs via syslog:\n\n Configure [Log Subscription](https://www.cisco.com/c/en/us/td/docs/security/esa/esa14-0/user_guide/b_ESA_Admin_Guide_14-0/b_ESA_Admin_Guide_12_1_chapter_0100111.html#con_1134718)\n\n>**NOTE:** Select **Consolidated Event Logs** in Log Type field."", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""2Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoSEG/Data%20Connectors/template_CiscoSEGAMA.json","true" -"","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"Cisco_Umbrella_audit_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [Cisco Cloud Security log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cisco Cloud Security logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions to set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Cloud Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": 
[{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Cloud Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Cloud Security data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" -"Cisco_Umbrella_cloudfirewall_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [Cisco Cloud Security log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cisco Cloud Security logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions to set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Cloud Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": 
[{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Cloud Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Cloud Security data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" -"Cisco_Umbrella_dlp_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [Cisco Cloud Security log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cisco Cloud Security logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions to set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Cloud Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": 
[{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Cloud Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Cloud Security data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" -"Cisco_Umbrella_dns_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [Cisco Cloud Security log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cisco Cloud Security logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions to set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Cloud Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": 
[{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Cloud Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Cloud Security data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" -"Cisco_Umbrella_fileevent_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [Cisco Cloud Security log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cisco Cloud Security logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions to set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Cloud Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": 
[{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Cloud Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Cloud Security data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" -"Cisco_Umbrella_firewall_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [Cisco Cloud Security log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cisco Cloud Security logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Cloud Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": 
[{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Cloud Security data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n**Note:** For the S3Bucket use the value that Cisco referrs to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Cloud Security data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" -"Cisco_Umbrella_intrusion_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [Cisco Cloud Security log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cisco Cloud Security logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Cloud Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": 
[{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Cloud Security data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n**Note:** For the S3Bucket use the value that Cisco referrs to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Cloud Security data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" -"Cisco_Umbrella_ip_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [Cisco Cloud Security log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cisco Cloud Security logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Cloud Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": 
[{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Cloud Security data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n**Note:** For the S3Bucket use the value that Cisco referrs to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Cloud Security data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" -"Cisco_Umbrella_proxy_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [Cisco Cloud Security log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cisco Cloud Security logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Cloud Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": 
[{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Cloud Security data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n**Note:** For the S3Bucket use the value that Cisco referrs to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Cloud Security data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" -"Cisco_Umbrella_ravpnlogs_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [Cisco Cloud Security log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cisco Cloud Security logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Cloud Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": 
[{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Cloud Security data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n**Note:** For the S3Bucket use the value that Cisco referrs to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Cloud Security data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" -"Cisco_Umbrella_ztaflow_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [Cisco Cloud Security log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cisco Cloud Security logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions to set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Cloud Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": 
[{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Cloud Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Cloud Security data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" -"Cisco_Umbrella_ztna_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [Cisco Cloud Security log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cisco Cloud Security logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions to set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Cloud Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": 
[{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Cloud Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Cloud Security data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" -"Cisco_Umbrella_audit_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [cisco umbrella log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. 
[Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Network Prerequisites for Private Access**\n\n>**IMPORTANT:** When deploying with private storage account access, ensure the following network prerequisites are met:\n> - **Virtual Network**: An existing Virtual Network (VNet) must be available\n> - **Subnet**: A dedicated subnet within the VNet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration\n> - **Subnet Delegation**: Configure the subnet delegation using Azure Portal, ARM template, or Azure CLI:\n> - Azure Portal: Go to Virtual networks \u2192 Select your VNet \u2192 Subnets \u2192 Select subnet \u2192 Delegate subnet to service \u2192 Choose **Microsoft.Web/serverFarms**\n> - Azure CLI: `az network vnet subnet update --resource-group --vnet-name --name --delegations Microsoft.Web/serverFarms`\n> - **Private Endpoints**: The deployment will create private endpoints for storage account services (blob, file, queue, table) within the same subnet""}, {""title"": """", ""description"": ""**STEP 2 - Configuration of the Cisco Umbrella logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions to set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Umbrella data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": 
""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Umbrella data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n4. **For Private Access Deployment**: Also enter **existingVnetName**, **existingVnetResourceGroupName**, and **existingSubnetName** (ensure subnet is delegated to Microsoft.Web/serverFarms)\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n5. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n6. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Umbrella data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. 
Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}, {""name"": ""Virtual Network permissions (for private access)"", ""description"": ""For private storage account access, **Network Contributor** permissions are required on the Virtual Network and subnet. The subnet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" -"Cisco_Umbrella_cloudfirewall_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [cisco umbrella log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. 
[Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Network Prerequisites for Private Access**\n\n>**IMPORTANT:** When deploying with private storage account access, ensure the following network prerequisites are met:\n> - **Virtual Network**: An existing Virtual Network (VNet) must be available\n> - **Subnet**: A dedicated subnet within the VNet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration\n> - **Subnet Delegation**: Configure the subnet delegation using Azure Portal, ARM template, or Azure CLI:\n> - Azure Portal: Go to Virtual networks \u2192 Select your VNet \u2192 Subnets \u2192 Select subnet \u2192 Delegate subnet to service \u2192 Choose **Microsoft.Web/serverFarms**\n> - Azure CLI: `az network vnet subnet update --resource-group --vnet-name --name --delegations Microsoft.Web/serverFarms`\n> - **Private Endpoints**: The deployment will create private endpoints for storage account services (blob, file, queue, table) within the same subnet""}, {""title"": """", ""description"": ""**STEP 2 - Configuration of the Cisco Umbrella logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions to set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Umbrella data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": 
""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Umbrella data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n4. **For Private Access Deployment**: Also enter **existingVnetName**, **existingVnetResourceGroupName**, and **existingSubnetName** (ensure subnet is delegated to Microsoft.Web/serverFarms)\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n5. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n6. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Umbrella data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. 
Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}, {""name"": ""Virtual Network permissions (for private access)"", ""description"": ""For private storage account access, **Network Contributor** permissions are required on the Virtual Network and subnet. The subnet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" -"Cisco_Umbrella_dlp_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [cisco umbrella log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. 
[Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Network Prerequisites for Private Access**\n\n>**IMPORTANT:** When deploying with private storage account access, ensure the following network prerequisites are met:\n> - **Virtual Network**: An existing Virtual Network (VNet) must be available\n> - **Subnet**: A dedicated subnet within the VNet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration\n> - **Subnet Delegation**: Configure the subnet delegation using Azure Portal, ARM template, or Azure CLI:\n> - Azure Portal: Go to Virtual networks \u2192 Select your VNet \u2192 Subnets \u2192 Select subnet \u2192 Delegate subnet to service \u2192 Choose **Microsoft.Web/serverFarms**\n> - Azure CLI: `az network vnet subnet update --resource-group --vnet-name --name --delegations Microsoft.Web/serverFarms`\n> - **Private Endpoints**: The deployment will create private endpoints for storage account services (blob, file, queue, table) within the same subnet""}, {""title"": """", ""description"": ""**STEP 2 - Configuration of the Cisco Umbrella logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Umbrella data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": 
""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Umbrella data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n4. **For Private Access Deployment**: Also enter **existingVnetName**, **existingVnetResourceGroupName**, and **existingSubnetName** (ensure subnet is delegated to Microsoft.Web/serverFarms)\n**Note:** For the S3Bucket use the value that Cisco referrs to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n5. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n6. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Umbrella data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. 
Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}, {""name"": ""Virtual Network permissions (for private access)"", ""description"": ""For private storage account access, **Network Contributor** permissions are required on the Virtual Network and subnet. The subnet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" -"Cisco_Umbrella_dns_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [cisco umbrella log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. 
[Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Network Prerequisites for Private Access**\n\n>**IMPORTANT:** When deploying with private storage account access, ensure the following network prerequisites are met:\n> - **Virtual Network**: An existing Virtual Network (VNet) must be available\n> - **Subnet**: A dedicated subnet within the VNet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration\n> - **Subnet Delegation**: Configure the subnet delegation using Azure Portal, ARM template, or Azure CLI:\n> - Azure Portal: Go to Virtual networks \u2192 Select your VNet \u2192 Subnets \u2192 Select subnet \u2192 Delegate subnet to service \u2192 Choose **Microsoft.Web/serverFarms**\n> - Azure CLI: `az network vnet subnet update --resource-group --vnet-name --name --delegations Microsoft.Web/serverFarms`\n> - **Private Endpoints**: The deployment will create private endpoints for storage account services (blob, file, queue, table) within the same subnet""}, {""title"": """", ""description"": ""**STEP 2 - Configuration of the Cisco Umbrella logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Umbrella data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": 
""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Umbrella data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n4. **For Private Access Deployment**: Also enter **existingVnetName**, **existingVnetResourceGroupName**, and **existingSubnetName** (ensure subnet is delegated to Microsoft.Web/serverFarms)\n**Note:** For the S3Bucket use the value that Cisco referrs to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n5. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n6. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Umbrella data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. 
Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}, {""name"": ""Virtual Network permissions (for private access)"", ""description"": ""For private storage account access, **Network Contributor** permissions are required on the Virtual Network and subnet. The subnet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" -"Cisco_Umbrella_fileevent_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [cisco umbrella log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. 
[Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Network Prerequisites for Private Access**\n\n>**IMPORTANT:** When deploying with private storage account access, ensure the following network prerequisites are met:\n> - **Virtual Network**: An existing Virtual Network (VNet) must be available\n> - **Subnet**: A dedicated subnet within the VNet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration\n> - **Subnet Delegation**: Configure the subnet delegation using Azure Portal, ARM template, or Azure CLI:\n> - Azure Portal: Go to Virtual networks \u2192 Select your VNet \u2192 Subnets \u2192 Select subnet \u2192 Delegate subnet to service \u2192 Choose **Microsoft.Web/serverFarms**\n> - Azure CLI: `az network vnet subnet update --resource-group --vnet-name --name --delegations Microsoft.Web/serverFarms`\n> - **Private Endpoints**: The deployment will create private endpoints for storage account services (blob, file, queue, table) within the same subnet""}, {""title"": """", ""description"": ""**STEP 2 - Configuration of the Cisco Umbrella logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Umbrella data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": 
""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Umbrella data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n4. **For Private Access Deployment**: Also enter **existingVnetName**, **existingVnetResourceGroupName**, and **existingSubnetName** (ensure subnet is delegated to Microsoft.Web/serverFarms)\n**Note:** For the S3Bucket use the value that Cisco referrs to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n5. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n6. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Umbrella data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. 
Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}, {""name"": ""Virtual Network permissions (for private access)"", ""description"": ""For private storage account access, **Network Contributor** permissions are required on the Virtual Network and subnet. The subnet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" -"Cisco_Umbrella_firewall_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [cisco umbrella log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. 
[Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Network Prerequisites for Private Access**\n\n>**IMPORTANT:** When deploying with private storage account access, ensure the following network prerequisites are met:\n> - **Virtual Network**: An existing Virtual Network (VNet) must be available\n> - **Subnet**: A dedicated subnet within the VNet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration\n> - **Subnet Delegation**: Configure the subnet delegation using Azure Portal, ARM template, or Azure CLI:\n> - Azure Portal: Go to Virtual networks \u2192 Select your VNet \u2192 Subnets \u2192 Select subnet \u2192 Delegate subnet to service \u2192 Choose **Microsoft.Web/serverFarms**\n> - Azure CLI: `az network vnet subnet update --resource-group --vnet-name --name --delegations Microsoft.Web/serverFarms`\n> - **Private Endpoints**: The deployment will create private endpoints for storage account services (blob, file, queue, table) within the same subnet""}, {""title"": """", ""description"": ""**STEP 2 - Configuration of the Cisco Umbrella logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Umbrella data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": 
""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Umbrella data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n4. **For Private Access Deployment**: Also enter **existingVnetName**, **existingVnetResourceGroupName**, and **existingSubnetName** (ensure subnet is delegated to Microsoft.Web/serverFarms)\n**Note:** For the S3Bucket use the value that Cisco referrs to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n5. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n6. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Umbrella data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. 
Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}, {""name"": ""Virtual Network permissions (for private access)"", ""description"": ""For private storage account access, **Network Contributor** permissions are required on the Virtual Network and subnet. The subnet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" -"Cisco_Umbrella_intrusion_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [cisco umbrella log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. 
[Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Network Prerequisites for Private Access**\n\n>**IMPORTANT:** When deploying with private storage account access, ensure the following network prerequisites are met:\n> - **Virtual Network**: An existing Virtual Network (VNet) must be available\n> - **Subnet**: A dedicated subnet within the VNet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration\n> - **Subnet Delegation**: Configure the subnet delegation using Azure Portal, ARM template, or Azure CLI:\n> - Azure Portal: Go to Virtual networks \u2192 Select your VNet \u2192 Subnets \u2192 Select subnet \u2192 Delegate subnet to service \u2192 Choose **Microsoft.Web/serverFarms**\n> - Azure CLI: `az network vnet subnet update --resource-group --vnet-name --name --delegations Microsoft.Web/serverFarms`\n> - **Private Endpoints**: The deployment will create private endpoints for storage account services (blob, file, queue, table) within the same subnet""}, {""title"": """", ""description"": ""**STEP 2 - Configuration of the Cisco Umbrella logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Umbrella data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": 
""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Umbrella data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n4. **For Private Access Deployment**: Also enter **existingVnetName**, **existingVnetResourceGroupName**, and **existingSubnetName** (ensure subnet is delegated to Microsoft.Web/serverFarms)\n**Note:** For the S3Bucket use the value that Cisco referrs to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n5. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n6. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Umbrella data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. 
Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}, {""name"": ""Virtual Network permissions (for private access)"", ""description"": ""For private storage account access, **Network Contributor** permissions are required on the Virtual Network and subnet. The subnet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" -"Cisco_Umbrella_ip_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [cisco umbrella log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. 
[Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Network Prerequisites for Private Access**\n\n>**IMPORTANT:** When deploying with private storage account access, ensure the following network prerequisites are met:\n> - **Virtual Network**: An existing Virtual Network (VNet) must be available\n> - **Subnet**: A dedicated subnet within the VNet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration\n> - **Subnet Delegation**: Configure the subnet delegation using Azure Portal, ARM template, or Azure CLI:\n> - Azure Portal: Go to Virtual networks \u2192 Select your VNet \u2192 Subnets \u2192 Select subnet \u2192 Delegate subnet to service \u2192 Choose **Microsoft.Web/serverFarms**\n> - Azure CLI: `az network vnet subnet update --resource-group --vnet-name --name --delegations Microsoft.Web/serverFarms`\n> - **Private Endpoints**: The deployment will create private endpoints for storage account services (blob, file, queue, table) within the same subnet""}, {""title"": """", ""description"": ""**STEP 2 - Configuration of the Cisco Umbrella logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Umbrella data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": 
""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Umbrella data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n4. **For Private Access Deployment**: Also enter **existingVnetName**, **existingVnetResourceGroupName**, and **existingSubnetName** (ensure subnet is delegated to Microsoft.Web/serverFarms)\n**Note:** For the S3Bucket use the value that Cisco referrs to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n5. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n6. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Umbrella data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. 
Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}, {""name"": ""Virtual Network permissions (for private access)"", ""description"": ""For private storage account access, **Network Contributor** permissions are required on the Virtual Network and subnet. The subnet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" -"Cisco_Umbrella_proxy_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [cisco umbrella log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. 
[Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Network Prerequisites for Private Access**\n\n>**IMPORTANT:** When deploying with private storage account access, ensure the following network prerequisites are met:\n> - **Virtual Network**: An existing Virtual Network (VNet) must be available\n> - **Subnet**: A dedicated subnet within the VNet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration\n> - **Subnet Delegation**: Configure the subnet delegation using Azure Portal, ARM template, or Azure CLI:\n> - Azure Portal: Go to Virtual networks \u2192 Select your VNet \u2192 Subnets \u2192 Select subnet \u2192 Delegate subnet to service \u2192 Choose **Microsoft.Web/serverFarms**\n> - Azure CLI: `az network vnet subnet update --resource-group --vnet-name --name --delegations Microsoft.Web/serverFarms`\n> - **Private Endpoints**: The deployment will create private endpoints for storage account services (blob, file, queue, table) within the same subnet""}, {""title"": """", ""description"": ""**STEP 2 - Configuration of the Cisco Umbrella logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Umbrella data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": 
""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Umbrella data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n4. **For Private Access Deployment**: Also enter **existingVnetName**, **existingVnetResourceGroupName**, and **existingSubnetName** (ensure subnet is delegated to Microsoft.Web/serverFarms)\n**Note:** For the S3Bucket use the value that Cisco referrs to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n5. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n6. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Umbrella data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. 
Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}, {""name"": ""Virtual Network permissions (for private access)"", ""description"": ""For private storage account access, **Network Contributor** permissions are required on the Virtual Network and subnet. The subnet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" -"Cisco_Umbrella_ravpnlogs_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [cisco umbrella log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. 
[Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Network Prerequisites for Private Access**\n\n>**IMPORTANT:** When deploying with private storage account access, ensure the following network prerequisites are met:\n> - **Virtual Network**: An existing Virtual Network (VNet) must be available\n> - **Subnet**: A dedicated subnet within the VNet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration\n> - **Subnet Delegation**: Configure the subnet delegation using Azure Portal, ARM template, or Azure CLI:\n> - Azure Portal: Go to Virtual networks \u2192 Select your VNet \u2192 Subnets \u2192 Select subnet \u2192 Delegate subnet to service \u2192 Choose **Microsoft.Web/serverFarms**\n> - Azure CLI: `az network vnet subnet update --resource-group --vnet-name --name --delegations Microsoft.Web/serverFarms`\n> - **Private Endpoints**: The deployment will create private endpoints for storage account services (blob, file, queue, table) within the same subnet""}, {""title"": """", ""description"": ""**STEP 2 - Configuration of the Cisco Umbrella logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Umbrella data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": 
""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Umbrella data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n4. **For Private Access Deployment**: Also enter **existingVnetName**, **existingVnetResourceGroupName**, and **existingSubnetName** (ensure subnet is delegated to Microsoft.Web/serverFarms)\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n5. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n6. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Umbrella data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. 
Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}, {""name"": ""Virtual Network permissions (for private access)"", ""description"": ""For private storage account access, **Network Contributor** permissions are required on the Virtual Network and subnet. The subnet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" -"Cisco_Umbrella_ztaflow_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [cisco umbrella log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. 
[Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Network Prerequisites for Private Access**\n\n>**IMPORTANT:** When deploying with private storage account access, ensure the following network prerequisites are met:\n> - **Virtual Network**: An existing Virtual Network (VNet) must be available\n> - **Subnet**: A dedicated subnet within the VNet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration\n> - **Subnet Delegation**: Configure the subnet delegation using Azure Portal, ARM template, or Azure CLI:\n> - Azure Portal: Go to Virtual networks \u2192 Select your VNet \u2192 Subnets \u2192 Select subnet \u2192 Delegate subnet to service \u2192 Choose **Microsoft.Web/serverFarms**\n> - Azure CLI: `az network vnet subnet update --resource-group --vnet-name --name --delegations Microsoft.Web/serverFarms`\n> - **Private Endpoints**: The deployment will create private endpoints for storage account services (blob, file, queue, table) within the same subnet""}, {""title"": """", ""description"": ""**STEP 2 - Configuration of the Cisco Umbrella logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Umbrella data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": 
""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Umbrella data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n4. **For Private Access Deployment**: Also enter **existingVnetName**, **existingVnetResourceGroupName**, and **existingSubnetName** (ensure subnet is delegated to Microsoft.Web/serverFarms)\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n5. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n6. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Umbrella data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. 
Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}, {""name"": ""Virtual Network permissions (for private access)"", ""description"": ""For private storage account access, **Network Contributor** permissions are required on the Virtual Network and subnet. The subnet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" -"Cisco_Umbrella_ztna_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [cisco umbrella log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. 
[Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Network Prerequisites for Private Access**\n\n>**IMPORTANT:** When deploying with private storage account access, ensure the following network prerequisites are met:\n> - **Virtual Network**: An existing Virtual Network (VNet) must be available\n> - **Subnet**: A dedicated subnet within the VNet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration\n> - **Subnet Delegation**: Configure the subnet delegation using Azure Portal, ARM template, or Azure CLI:\n> - Azure Portal: Go to Virtual networks \u2192 Select your VNet \u2192 Subnets \u2192 Select subnet \u2192 Delegate subnet to service \u2192 Choose **Microsoft.Web/serverFarms**\n> - Azure CLI: `az network vnet subnet update --resource-group --vnet-name --name --delegations Microsoft.Web/serverFarms`\n> - **Private Endpoints**: The deployment will create private endpoints for storage account services (blob, file, queue, table) within the same subnet""}, {""title"": """", ""description"": ""**STEP 2 - Configuration of the Cisco Umbrella logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Umbrella data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": 
""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Umbrella data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n4. **For Private Access Deployment**: Also enter **existingVnetName**, **existingVnetResourceGroupName**, and **existingSubnetName** (ensure subnet is delegated to Microsoft.Web/serverFarms)\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n5. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n6. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Umbrella data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. 
Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}, {""name"": ""Virtual Network permissions (for private access)"", ""description"": ""For private storage account access, **Network Contributor** permissions are required on the Virtual Network and subnet. The subnet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" -"","CiscoWSA","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoWSA","azuresentinel","azure-sentinel-solution-ciscowsa","2021-06-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"Syslog","CiscoWSA","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoWSA","azuresentinel","azure-sentinel-solution-ciscowsa","2021-06-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoWSA","Cisco","[Deprecated] Cisco Web Security Appliance","[Cisco Web Security Appliance (WSA)](https://www.cisco.com/c/en/us/products/security/web-security-appliance/index.html) data connector provides the capability to ingest [Cisco WSA Access 
Logs](https://www.cisco.com/c/en/us/td/docs/security/wsa/wsa_14-0/User-Guide/b_WSA_UserGuide_14_0/b_WSA_UserGuide_11_7_chapter_010101.html) into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**CiscoWSAEvent**](https://aka.ms/sentinel-CiscoWSA-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using AsyncOS 14.0 for Cisco Web Security Appliance"", ""instructions"": []}, {""title"": ""1. Configure Cisco Web Security Appliance to forward logs via Syslog to remote server where you will install the agent."", ""description"": ""[Follow these steps](https://www.cisco.com/c/en/us/td/docs/security/esa/esa14-0/user_guide/b_ESA_Admin_Guide_14-0/b_ESA_Admin_Guide_12_1_chapter_0100111.html#con_1134718) to configure Cisco Web Security Appliance to forward logs via Syslog\n\n>**NOTE:** Select **Syslog Push** as a Retrieval Method.""}, {""title"": ""2. 
Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server to which the logs will be forwarded.\n\n> Logs on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""3. 
Check logs in Microsoft Sentinel"", ""description"": ""Open Log Analytics to check if the logs are received using the Syslog schema.\n\n>**NOTE:** It may take up to 15 minutes before new logs will appear in Syslog table."", ""instructions"": []}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoWSA/Data%20Connectors/Connector_WSA_Syslog.json","true" -"","Citrix ADC","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20ADC","azuresentinel","azure-sentinel-solution-citrixadc","2022-06-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"Syslog","Citrix ADC","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20ADC","azuresentinel","azure-sentinel-solution-citrixadc","2022-06-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CitrixADC","Citrix","[Deprecated] Citrix ADC (former NetScaler)","The [Citrix ADC (former NetScaler)](https://www.citrix.com/products/citrix-adc/) data connector provides the capability to ingest Citrix ADC logs into Microsoft Sentinel. 
If you want to ingest Citrix WAF logs into Microsoft Sentinel, refer this [documentation](https://learn.microsoft.com/azure/sentinel/data-connectors/citrix-waf-web-app-firewall)","[{""title"": """", ""description"": "">**NOTE:** 1. This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias CitrixADCEvent and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20ADC/Parsers/CitrixADCEvent.yaml), this function maps Citrix ADC (former NetScaler) events to Advanced Security Information Model [ASIM](https://docs.microsoft.com/azure/sentinel/normalization). The function usually takes 10-15 minutes to activate after solution installation/update. \n\n>**NOTE:** 2. This parser requires a watchlist named **`Sources_by_SourceType`** \n\n> i. If you don't have watchlist already created, please click [here](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FASIM%2Fdeploy%2FWatchlists%2FASimSourceType.json) to create. \n\n> ii. Open watchlist **`Sources_by_SourceType`** and add entries for this data source.\n\n> iii. The SourceType value for CitrixADC is **`CitrixADC`**. \n\n> You can refer [this](https://learn.microsoft.com/en-us/azure/sentinel/normalization-manage-parsers?WT.mc_id=Portal-fx#configure-the-sources-relevant-to-a-source-specific-parser) documentation for more details"", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n 2. Select **Apply below configuration to my machines** and select the facilities and severities.\n 3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Configure Citrix ADC to forward logs via Syslog"", ""description"": ""3.1 Navigate to **Configuration tab > System > Auditing > Syslog > Servers tab**\n\n 3.2 Specify **Syslog action name**.\n\n 3.3 Set IP address of remote Syslog server and port.\n\n 3.4 Set **Transport type** as **TCP** or **UDP** depending on your remote Syslog server configuration.\n\n 3.5 You can refer Citrix ADC (former NetScaler) [documentation](https://docs.netscaler.com/) for more details.""}, {""title"": ""4. 
Check logs in Microsoft Sentinel"", ""description"": ""Open Log Analytics to check if the logs are received using the Syslog schema.\n\n>**NOTE:** It may take up to 15 minutes before new logs will appear in Syslog table."", ""instructions"": []}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20ADC/Data%20Connectors/Connector_CitrixADC_syslog.json","true" -"","Citrix Analytics for Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Analytics%20for%20Security","citrix","citrix_analytics_for_security_mss","2022-05-06","","","Citrix Systems","Partner","https://www.citrix.com/support/","","domains","","","","","","","false","","false" -"CitrixAnalytics_indicatorEventDetails_CL","Citrix Analytics for Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Analytics%20for%20Security","citrix","citrix_analytics_for_security_mss","2022-05-06","","","Citrix Systems","Partner","https://www.citrix.com/support/","","domains","Citrix","CITRIX","CITRIX SECURITY ANALYTICS","Citrix Analytics (Security) integration with Microsoft Sentinel helps you to export data analyzed for risky events from Citrix Analytics (Security) into Microsoft Sentinel environment. 
You can create custom dashboards, analyze data from other sources along with that from Citrix Analytics (Security) and create custom workflows using Logic Apps to monitor and mitigate security events.","[{""title"": """", ""description"": ""To get access to this capability and the configuration steps on Citrix Analytics, please visit: [Connect Citrix to Microsoft Sentinel.](https://aka.ms/Sentinel-Citrix-Connector)\u200b\n"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Licensing"", ""description"": ""Entitlements to Citrix Security Analytics in Citrix Cloud. 
Please review [Citrix Tool License Agreement.](https://aka.ms/sentinel-citrixanalyticslicense-readme)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Analytics%20for%20Security/Data%20Connectors/CitrixSecurityAnalytics.json","true" -"CitrixAnalytics_indicatorSummary_CL","Citrix Analytics for Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Analytics%20for%20Security","citrix","citrix_analytics_for_security_mss","2022-05-06","","","Citrix Systems","Partner","https://www.citrix.com/support/","","domains","Citrix","CITRIX","CITRIX SECURITY ANALYTICS","Citrix Analytics (Security) integration with Microsoft Sentinel helps you to export data analyzed for risky events from Citrix Analytics (Security) into Microsoft Sentinel environment. You can create custom dashboards, analyze data from other sources along with that from Citrix Analytics (Security) and create custom workflows using Logic Apps to monitor and mitigate security events.","[{""title"": """", ""description"": ""To get access to this capability and the configuration steps on Citrix Analytics, please visit: [Connect Citrix to Microsoft Sentinel.](https://aka.ms/Sentinel-Citrix-Connector)\u200b\n"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Licensing"", ""description"": ""Entitlements to Citrix Security Analytics in Citrix Cloud. Please review [Citrix Tool License Agreement.](https://aka.ms/sentinel-citrixanalyticslicense-readme)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Analytics%20for%20Security/Data%20Connectors/CitrixSecurityAnalytics.json","true" -"CitrixAnalytics_riskScoreChange_CL","Citrix Analytics for Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Analytics%20for%20Security","citrix","citrix_analytics_for_security_mss","2022-05-06","","","Citrix Systems","Partner","https://www.citrix.com/support/","","domains","Citrix","CITRIX","CITRIX SECURITY ANALYTICS","Citrix Analytics (Security) integration with Microsoft Sentinel helps you to export data analyzed for risky events from Citrix Analytics (Security) into Microsoft Sentinel environment. 
You can create custom dashboards, analyze data from other sources along with that from Citrix Analytics (Security) and create custom workflows using Logic Apps to monitor and mitigate security events.","[{""title"": """", ""description"": ""To get access to this capability and the configuration steps on Citrix Analytics, please visit: [Connect Citrix to Microsoft Sentinel.](https://aka.ms/Sentinel-Citrix-Connector)\u200b\n"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Licensing"", ""description"": ""Entitlements to Citrix Security Analytics in Citrix Cloud. 
Please review [Citrix Tool License Agreement.](https://aka.ms/sentinel-citrixanalyticslicense-readme)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Analytics%20for%20Security/Data%20Connectors/CitrixSecurityAnalytics.json","true" -"CitrixAnalytics_userProfile_CL","Citrix Analytics for Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Analytics%20for%20Security","citrix","citrix_analytics_for_security_mss","2022-05-06","","","Citrix Systems","Partner","https://www.citrix.com/support/","","domains","Citrix","CITRIX","CITRIX SECURITY ANALYTICS","Citrix Analytics (Security) integration with Microsoft Sentinel helps you to export data analyzed for risky events from Citrix Analytics (Security) into Microsoft Sentinel environment. You can create custom dashboards, analyze data from other sources along with that from Citrix Analytics (Security) and create custom workflows using Logic Apps to monitor and mitigate security events.","[{""title"": """", ""description"": ""To get access to this capability and the configuration steps on Citrix Analytics, please visit: [Connect Citrix to Microsoft Sentinel.](https://aka.ms/Sentinel-Citrix-Connector)\u200b\n"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Licensing"", ""description"": ""Entitlements to Citrix Security Analytics in Citrix Cloud. Please review [Citrix Tool License Agreement.](https://aka.ms/sentinel-citrixanalyticslicense-readme)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Analytics%20for%20Security/Data%20Connectors/CitrixSecurityAnalytics.json","true" -"","Citrix Web App Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Web%20App%20Firewall","citrix","citrix_waf_mss","2022-05-06","","","Citrix Systems","Partner","https://www.citrix.com/support/","","domains","","","","","","","false","","false" -"CommonSecurityLog","Citrix Web App Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Web%20App%20Firewall","citrix","citrix_waf_mss","2022-05-06","","","Citrix Systems","Partner","https://www.citrix.com/support/","","domains","CitrixWAF","Citrix Systems Inc.","[Deprecated] Citrix WAF (Web App Firewall) via Legacy Agent"," Citrix WAF (Web App Firewall) is an industry leading enterprise-grade WAF solution. Citrix WAF mitigates threats against your public-facing assets, including websites, apps, and APIs. From layer 3 to layer 7, Citrix WAF includes protections such as IP reputation, bot mitigation, defense against the OWASP Top 10 application threats, built-in signatures to protect against application stack vulnerabilities, and more.

Citrix WAF supports Common Event Format (CEF) which is an industry standard format on top of Syslog messages . By connecting Citrix WAF CEF logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Configure Citrix WAF to send Syslog messages in CEF format to the proxy machine using the steps below. \n\n1. 
Follow [this guide](https://support.citrix.com/article/CTX234174) to configure WAF.\n\n2. Follow [this guide](https://support.citrix.com/article/CTX136146) to configure CEF logs.\n\n3. Follow [this guide](https://docs.citrix.com/en-us/citrix-adc/13/system/audit-logging/configuring-audit-logging.html) to forward the logs to proxy . Make sure you to send the logs to port 514 TCP on the Linux machine's IP address.\n\n""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Web%20App%20Firewall/Data%20Connectors/Citrix_WAF.json","true" -"CommonSecurityLog","Citrix Web App Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Web%20App%20Firewall","citrix","citrix_waf_mss","2022-05-06","","","Citrix Systems","Partner","https://www.citrix.com/support/","","domains","CitrixWAFAma","Citrix Systems Inc.","[Deprecated] Citrix WAF (Web App Firewall) via AMA"," Citrix WAF (Web App Firewall) is an industry leading enterprise-grade WAF solution. Citrix WAF mitigates threats against your public-facing assets, including websites, apps, and APIs. From layer 3 to layer 7, Citrix WAF includes protections such as IP reputation, bot mitigation, defense against the OWASP Top 10 application threats, built-in signatures to protect against application stack vulnerabilities, and more.

Citrix WAF supports Common Event Format (CEF) which is an industry standard format on top of Syslog messages . By connecting Citrix WAF CEF logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Configure Citrix WAF to send Syslog messages in CEF format to the proxy machine using the steps below. \n\n1. Follow [this guide](https://support.citrix.com/article/CTX234174) to configure WAF.\n\n2. Follow [this guide](https://support.citrix.com/article/CTX136146) to configure CEF logs.\n\n3. 
Follow [this guide](https://docs.citrix.com/en-us/citrix-adc/13/system/audit-logging/configuring-audit-logging.html) to forward the logs to proxy . Make sure you to send the logs to port 514 TCP on the Linux machine's IP address.\n\n"", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Web%20App%20Firewall/Data%20Connectors/template_Citrix_WAFAMA.json","true" -"","Claroty","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Claroty","azuresentinel","azure-sentinel-solution-claroty","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"CommonSecurityLog","Claroty","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Claroty","azuresentinel","azure-sentinel-solution-claroty","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Claroty","Claroty","[Deprecated] Claroty via Legacy Agent","The [Claroty](https://claroty.com/) data connector provides the capability to ingest [Continuous Threat Detection](https://claroty.com/resources/datasheets/continuous-threat-detection) and [Secure Remote Access](https://claroty.com/industrial-cybersecurity/sra) events into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected 
[**ClarotyEvent**](https://aka.ms/sentinel-claroty-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Configure Claroty to send logs using CEF"", ""description"": ""Configure log forwarding using CEF:\n\n1. Navigate to the **Syslog** section of the Configuration menu.\n\n2. Select **+Add**.\n\n3. In the **Add New Syslog Dialog** specify Remote Server **IP**, **Port**, **Protocol** and select **Message Format** - **CEF**.\n\n4. 
Choose **Save** to exit the **Add Syslog dialog**.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Claroty/Data%20Connectors/Connector_Claroty_CEF.json","true" -"CommonSecurityLog","Claroty","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Claroty","azuresentinel","azure-sentinel-solution-claroty","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ClarotyAma","Claroty","[Deprecated] Claroty via AMA","The [Claroty](https://claroty.com/) data connector provides the capability to ingest [Continuous Threat Detection](https://claroty.com/resources/datasheets/continuous-threat-detection) and [Secure Remote Access](https://claroty.com/industrial-cybersecurity/sra) events into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**ClarotyEvent**](https://aka.ms/sentinel-claroty-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Configure Claroty to send logs using CEF"", ""description"": ""Configure log forwarding using CEF:\n\n1. Navigate to the **Syslog** section of the Configuration menu.\n\n2. Select **+Add**.\n\n3. In the **Add New Syslog Dialog** specify Remote Server **IP**, **Port**, **Protocol** and select **Message Format** - **CEF**.\n\n4. Choose **Save** to exit the **Add Syslog dialog**."", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Claroty/Data%20Connectors/template_ClarotyAMA.json","true" -"","Claroty xDome","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Claroty%20xDome","claroty1709722359369","microsoft-sentinel-solution-xdome","2024-02-01","","","xDome Customer Support","Partner","https://claroty.com/support-policy","","domains,verticals","","","","","","","false","","false" -"CommonSecurityLog","Claroty xDome","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Claroty%20xDome","claroty1709722359369","microsoft-sentinel-solution-xdome","2024-02-01","","","xDome Customer Support","Partner","https://claroty.com/support-policy","","domains,verticals","ClarotyxDome","Claroty","Claroty xDome","[Claroty](https://claroty.com/) xDome delivers comprehensive security and alert management capabilities for healthcare and industrial network environments. It is designed to map multiple source types, identify the collected data, and integrate it into Microsoft Sentinel data models. This results in the ability to monitor all potential threats in your healthcare and industrial environments in one location, leading to more effective security monitoring and a stronger security posture.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python --version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Configure the Claroty xDome - Microsoft Sentinel integration to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python --version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Claroty%20xDome/Data%20Connectors/Claroty_xDome.json","true" -"","Cloud Identity Threat Protection Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cloud%20Identity%20Threat%20Protection%20Essentials","azuresentinel","azure-sentinel-solution-cloudthreatdetection","2022-11-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","Cloud Service Threat Protection Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cloud%20Service%20Threat%20Protection%20Essentials","azuresentinel","azure-sentinel-solution-cloudservicedetection","2022-11-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","Cloudflare","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cloudflare","cloudflare","cloudflare_sentinel","2021-10-20","","","Cloudflare","Partner","https://support.cloudflare.com","","domains","","","","","","","false","","false" -"Cloudflare_CL","Cloudflare","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cloudflare","cloudflare","cloudflare_sentinel","2021-10-20","","","Cloudflare","Partner","https://support.cloudflare.com","","domains","CloudflareDataConnector","Cloudflare","[DEPRECATED] Cloudflare","The Cloudflare data connector provides the capability to ingest [Cloudflare logs](https://developers.cloudflare.com/logs/) into Microsoft Sentinel using the Cloudflare Logpush and Azure Blob Storage. Refer to [Cloudflare documentation](https://developers.cloudflare.com/logs/logpush) for more information.

NOTE: This data connector has been deprecated; consider moving to the CCF data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Azure Blob Storage API to pull logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Cloudflare**](https://aka.ms/sentinel-CloudflareDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cloudflare Logpush**\n\nSee documentation to [setup Cloudflare Logpush to Microsoft Azure](https://developers.cloudflare.com/logs/logpush/logpush-dashboard)""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Cloudflare data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Azure Blob Storage connection string and container name, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": 
""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cloudflare data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CloudflareDataConnector-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Azure Blob Storage Container Name**, **Azure Blob Storage Connection String**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key**\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cloudflare data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CloudflareDataConnector-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. 
**Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CloudflareXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tCONTAINER_NAME\n\t\tAZURE_STORAGE_CONNECTION_STRING\n\t\tWORKSPACE_ID\n\t\tSHARED_KEY\n\t\tlogAnalyticsUri (Optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. \n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Azure Blob Storage connection string and container name"", ""description"": ""Azure Blob Storage connection string and container name where the logs are pushed to by Cloudflare Logpush. 
[See the documentation to learn more about creating Azure Blob Storage container.](https://learn.microsoft.com/azure/storage/blobs/storage-quickstart-blobs-portal)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cloudflare/Data%20Connectors/Cloudflare_API_FunctionApp.json","true" -"CloudflareV2_CL","Cloudflare","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cloudflare","cloudflare","cloudflare_sentinel","2021-10-20","","","Cloudflare","Partner","https://support.cloudflare.com","","domains","CloudflareDefinition","Microsoft","Cloudflare (Using Blob Container) (via Codeless Connector Framework)"," The Cloudflare data connector provides the capability to ingest Cloudflare logs into Microsoft Sentinel using the Cloudflare Logpush and Azure Blob Storage. Refer to [Cloudflare documentation](https://developers.cloudflare.com/logs/about/)for more information.","[{""title"": ""Connect Cloudflare Logs to Microsoft Sentinel"", ""description"": ""To enable Cloudflare logs for Microsoft Sentinel, provide the required information below and click on Connect.\n>"", ""instructions"": [{""parameters"": {""tenantId"": ""[subscription().tenantId]"", ""name"": ""principalId"", ""appId"": ""4f05ce56-95b6-4612-9d98-a45c8cc33f9f""}, ""type"": ""ServicePrincipalIDTextBox_test""}, {""parameters"": {""label"": ""The Blob container's URL you want to collect data from"", ""type"": ""text"", ""name"": ""blobContainerUri"", ""validations"": {""required"": true}}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""The Blob container's storage account resource group name"", ""type"": ""text"", ""name"": ""StorageAccountResourceGroupName"", ""validations"": {""required"": true}}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""The Blob container's storage account location"", ""type"": ""text"", ""name"": ""StorageAccountLocation"", ""validations"": {""required"": true}}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""The Blob 
container's storage account subscription id"", ""type"": ""text"", ""name"": ""StorageAccountSubscription"", ""validations"": {""required"": true}}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""The event grid topic name of the blob container's storage account if exist. else keep empty."", ""description"": ""The data flow using event grid to send 'blob-created event' notifications. There could be only one event grid topic for each storage account.\nGo to your blob container's storage account and look in the 'Events' section. If you already have a topic, please provide it's name. Else, keep the text box empty."", ""type"": ""text"", ""name"": ""EGSystemTopicName"", ""validations"": {""required"": false}}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}], ""customs"": [{""name"": ""Create a storage account and a container"", ""description"": ""Before setting up logpush in Cloudflare, first create a storage account and a container in Microsoft Azure. Use [this guide](https://learn.microsoft.com/en-us/azure/storage/blobs/storage-blobs-introduction) to know more about Container and Blob. Follow the steps in the [documentation](https://learn.microsoft.com/en-us/azure/storage/common/storage-account-create?tabs=azure-portal) to create an Azure Storage account.""}, {""name"": ""Generate a Blob SAS URL"", ""description"": ""Create and Write permissions are required. 
Refer the [documentation](https://learn.microsoft.com/en-us/azure/ai-services/translator/document-translation/how-to-guides/create-sas-tokens?tabs=Containers) to know more about Blob SAS token and url.""}, {""name"": ""Collecting logs from Cloudflare to your Blob container"", ""description"": ""Follow the steps in the [documentation](https://developers.cloudflare.com/logs/get-started/enable-destinations/azure/) for collecting logs from Cloudflare to your Blob container.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cloudflare/Data%20Connectors/CloudflareLog_CCF/CloudflareLog_ConnectorDefinition.json","true" -"","Cloudflare CCF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cloudflare%20CCF","cloudflare","azure-sentinel-solution-cloudflare-ccf","2025-09-30","","","Cloudflare","Partner","https://support.cloudflare.com","","domains","","","","","","","false","","false" -"CloudflareV2_CL","Cloudflare CCF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cloudflare%20CCF","cloudflare","azure-sentinel-solution-cloudflare-ccf","2025-09-30","","","Cloudflare","Partner","https://support.cloudflare.com","","domains","CloudflareDefinition","Microsoft","Cloudflare (Using Blob Container) (via Codeless Connector Framework)"," The Cloudflare data connector provides the capability to ingest Cloudflare logs into Microsoft Sentinel using the Cloudflare Logpush and Azure Blob Storage. 
Refer to [Cloudflare documentation](https://developers.cloudflare.com/logs/about/)for more information.","[{""title"": ""Connect Cloudflare Logs to Microsoft Sentinel"", ""description"": ""To enable Cloudflare logs for Microsoft Sentinel, provide the required information below and click on Connect.\n>"", ""instructions"": [{""parameters"": {""tenantId"": ""[subscription().tenantId]"", ""name"": ""principalId"", ""appId"": ""4f05ce56-95b6-4612-9d98-a45c8cc33f9f""}, ""type"": ""ServicePrincipalIDTextBox_test""}, {""parameters"": {""label"": ""The Blob container's URL you want to collect data from"", ""type"": ""text"", ""name"": ""blobContainerUri"", ""validations"": {""required"": true}}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""The Blob container's storage account resource group name"", ""type"": ""text"", ""name"": ""StorageAccountResourceGroupName"", ""validations"": {""required"": true}}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""The Blob container's storage account location"", ""type"": ""text"", ""name"": ""StorageAccountLocation"", ""validations"": {""required"": true}}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""The Blob container's storage account subscription id"", ""type"": ""text"", ""name"": ""StorageAccountSubscription"", ""validations"": {""required"": true}}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""The event grid topic name of the blob container's storage account if exist. else keep empty."", ""description"": ""The data flow using event grid to send 'blob-created event' notifications. There could be only one event grid topic for each storage account.\nGo to your blob container's storage account and look in the 'Events' section. If you already have a topic, please provide it's name. 
Else, keep the text box empty."", ""type"": ""text"", ""name"": ""EGSystemTopicName"", ""validations"": {""required"": false}}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}], ""customs"": [{""name"": ""Create a storage account and a container"", ""description"": ""Before setting up logpush in Cloudflare, first create a storage account and a container in Microsoft Azure. Use [this guide](https://learn.microsoft.com/en-us/azure/storage/blobs/storage-blobs-introduction) to know more about Container and Blob. Follow the steps in the [documentation](https://learn.microsoft.com/en-us/azure/storage/common/storage-account-create?tabs=azure-portal) to create an Azure Storage account.""}, {""name"": ""Generate a Blob SAS URL"", ""description"": ""Create and Write permissions are required. 
Refer the [documentation](https://learn.microsoft.com/en-us/azure/ai-services/translator/document-translation/how-to-guides/create-sas-tokens?tabs=Containers) to know more about Blob SAS token and url.""}, {""name"": ""Collecting logs from Cloudflare to your Blob container"", ""description"": ""Follow the steps in the [documentation](https://developers.cloudflare.com/logs/get-started/enable-destinations/azure/) for collecting logs from Cloudflare to your Blob container.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cloudflare%20CCF/Data%20Connectors/CloudflareLog_CCF/CloudflareLog_ConnectorDefinition.json","true" -"","CofenseIntelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseIntelligence","cofense","cofense-intelligence-sentinel","2023-05-26","2024-05-26","","Cofense Support","Partner","https://cofense.com/contact-support/","","domains","","","","","","","false","","false" -"Malware_Data_CL","CofenseIntelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseIntelligence","cofense","cofense-intelligence-sentinel","2023-05-26","2024-05-26","","Cofense Support","Partner","https://cofense.com/contact-support/","","domains","CofenseIntelligence","Cofense","Cofense Intelligence Threat Indicators Ingestion","The [Cofense-Intelligence](https://cofense.com/product-services/phishing-intelligence/) data connector provides the following capabilities:
1. CofenseToSentinel :
>* Get Threat Indicators from the Cofense Intelligence platform and create Threat Intelligence Indicators in Microsoft Sentinel.
2. SentinelToDefender :
>* Get Cofense Intelligence Threat Intelligence Indicators from Microsoft Sentinel Threat Intelligence and create/update Indicators in Microsoft Defender for Endpoints.
3. CofenseIntelligenceMalware :
>* Get Malware from Cofense Intelligence and post to custom logs table.
4. DownloadThreatReports :
>* This data connector will fetch the malware data and create the link from which Threat Reports can be downloaded.
5. RetryFailedIndicators :
>* This data connector will fetch failed indicators from the failed indicators file and retry creating/updating Threat Intelligence indicators in Microsoft Sentinel.


For more details on the REST APIs, refer to the documentation below:
1. Cofense Intelligence API documentation:
> https://www.threathq.com/docs/rest_api_reference.html
2. Microsoft Threat Intelligence Indicator documentation:
> https://learn.microsoft.com/rest/api/securityinsights/preview/threat-intelligence-indicator
3. Microsoft Defender for Endpoints Indicator documentation:
> https://learn.microsoft.com/microsoft-365/security/defender-endpoint/ti-indicator?view=o365-worldwide","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Cofense Intelligence APIs to pull its Threat Indicators and create Threat Intelligence Indicators into Microsoft Sentinel Threat Intelligence and create/update Threat Indicators in Cofense. Likewise, it also creates/updates Cofense Based Threat Indicators in Microsoft Defender for Endpoints. All this might result in additional indicator and data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Microsoft Azure Active Directory Application**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new Azure Active Directory application:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Azure Active Directory**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of CofenseIntelligence Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for Microsoft Azure Active Directory Application**\n\n Sometimes called an application password, a client secret is a string value required for the execution of CofenseIntelligence Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of CofenseIntelligence Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to Microsoft Azure Active Directory Application**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. 
Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Assign Defender Threat Indicator permissions to Microsoft Azure Active Directory Application**\n\n Follow the steps in this section to assign the permissions:\n 1. In the Azure portal, in **App registrations**, select **your application**.\n 2. To enable an app to access Defender for Endpoint indicators, assign it **'Ti.ReadWrite.All'** permission, on your application page, select **API Permissions > Add permission > APIs my organization uses >, type WindowsDefenderATP, and then select WindowsDefenderATP**.\n 3. Select **Application permissions > Ti.ReadWrite.All**, and then select **Add permissions**.\n 4. Select **Grant consent**. \n\n> **Reference link:** [https://docs.microsoft.com/microsoft-365/security/defender-endpoint/exposed-apis-create-app-webapp?view=o365-worldwide](https://docs.microsoft.com/microsoft-365/security/defender-endpoint/exposed-apis-create-app-webapp?view=o365-worldwide)""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create/get Credentials for the Cofense Intelligence account** \n\n Follow the steps in this section to create/get **Cofense Username** and **Password**:\n 1. Login to https://threathq.com and go to the **Settings menu** on the left navigation bar.\n 2. Choose the API Tokens tab and select **Add a New Token**\n 3. 
Make sure to save the **password**, as it will not be accessible again.""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Cofense Intelligence Threat Indicators data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Cofense API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cofense connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CofenseIntelligence-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tCofense BaseURL (https:///) \n\t\tCofense Username \n\t\tCofense Password \n\t\tAzure Client ID \n\t\tAzure Client Secret \n\t\tAzure Tenant ID \n\t\tAzure Resource Group Name \n\t\tAzure Workspace Name \n\t\tAzure Subscription ID \n\t\tRequireProxy \n\t\tProxy Username (optional) \n\t\tProxy Password (optional) \n\t\tProxy URL (optional) \n\t\tProxy Port (optional) \n\t\tLogLevel (optional) \n\t\tMalware_Data_Table_name\n\t\tSendCofenseIndicatorToDefender \n\t\tSchedule \n4. Click on **Review+Create**. \n5. 
Then after validation click on **Create** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cofense Intelligence Threat Indicators data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CofenseIntelligence-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CofenseXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tCofense BaseURL (https:///) \n\t\tCofense Username \n\t\tCofense Password \n\t\tAzure Client ID \n\t\tAzure Client Secret \n\t\tAzure Tenant ID \n\t\tAzure Resource Group Name \n\t\tAzure Workspace Name \n\t\tFunction App Name \n\t\tAzure Subscription ID \n\t\tRequireProxy \n\t\tProxy Username (optional) \n\t\tProxy Password (optional) \n\t\tProxy URL (optional) \n\t\tProxy Port (optional) \n\t\tLogLevel (optional) \n\t\tMalware_Data_Table_name\n\t\tSendCofenseIndicatorToDefender \n\t\tSchedule \n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Cofense Username** and **Password** is required. See the documentation to learn more about API on the [Rest API reference](https://www.threathq.com/docs/rest_api_reference.html)""}, {""name"": ""Microsoft Defender for Endpoints"", ""description"": ""**Microsoft Defender for Endpoints License** is required for SentinelToDefender function.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseIntelligence/Data%20Connectors/CofenseIntelligenceDataConnector/CofenseIntelligence_API_FunctionApp.json","true" -"ThreatIntelligenceIndicator","CofenseIntelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseIntelligence","cofense","cofense-intelligence-sentinel","2023-05-26","2024-05-26","","Cofense Support","Partner","https://cofense.com/contact-support/","","domains","CofenseIntelligence","Cofense","Cofense Intelligence Threat Indicators Ingestion","The [Cofense-Intelligence](https://cofense.com/product-services/phishing-intelligence/) data connector provides the following capabilities:
1. CofenseToSentinel :
>* Get Threat Indicators from the Cofense Intelligence platform and create Threat Intelligence Indicators in Microsoft Sentinel.
2. SentinelToDefender :
>* Get Cofense Intelligence Threat Intelligence Indicators from Microsoft Sentinel Threat Intelligence and create/update Indicators in Microsoft Defender for Endpoints.
3. CofenseIntelligenceMalware :
>* Get Malware from Cofense Intelligence and post to custom logs table.
4. DownloadThreatReports :
>* This data connector will fetch the malware data and create the link from which Threat Reports can be downloaded.
5. RetryFailedIndicators :
>* This data connector will fetch failed indicators from failed indicators file and retry creating/updating Threat Intelligence indicators in Microsoft Sentinel.


For more details of REST APIs refer to the below documentations:
1. Cofense Intelligence API documentation:
> https://www.threathq.com/docs/rest_api_reference.html
2. Microsoft Threat Intelligence Indicator documentation:
> https://learn.microsoft.com/rest/api/securityinsights/preview/threat-intelligence-indicator
3. Microsoft Defender for Endpoints Indicator documentation:
> https://learn.microsoft.com/microsoft-365/security/defender-endpoint/ti-indicator?view=o365-worldwide","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Cofense Intelligence APIs to pull its Threat Indicators and create Threat Intelligence Indicators into Microsoft Sentinel Threat Intelligence and create/update Threat Indicators in Cofense. Likewise, it also creates/updates Cofense Based Threat Indicators in Microsoft Defender for Endpoints. All this might result in additional indicator and data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Microsoft Azure Active Directory Application**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new Azure Active Directory application:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Azure Active Directory**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of CofenseIntelligence Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for Microsoft Azure Active Directory Application**\n\n Sometimes called an application password, a client secret is a string value required for the execution of CofenseIntelligence Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of CofenseIntelligence Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to Microsoft Azure Active Directory Application**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. 
Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Assign Defender Threat Indicator permissions to Microsoft Azure Active Directory Application**\n\n Follow the steps in this section to assign the permissions:\n 1. In the Azure portal, in **App registrations**, select **your application**.\n 2. To enable an app to access Defender for Endpoint indicators, assign it **'Ti.ReadWrite.All'** permission, on your application page, select **API Permissions > Add permission > APIs my organization uses >, type WindowsDefenderATP, and then select WindowsDefenderATP**.\n 3. Select **Application permissions > Ti.ReadWrite.All**, and then select **Add permissions**.\n 4. Select **Grant consent**. \n\n> **Reference link:** [https://docs.microsoft.com/microsoft-365/security/defender-endpoint/exposed-apis-create-app-webapp?view=o365-worldwide](https://docs.microsoft.com/microsoft-365/security/defender-endpoint/exposed-apis-create-app-webapp?view=o365-worldwide)""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create/get Credentials for the Cofense Intelligence account** \n\n Follow the steps in this section to create/get **Cofense Username** and **Password**:\n 1. Login to https://threathq.com and go to the **Settings menu** on the left navigation bar.\n 2. Choose the API Tokens tab and select **Add a New Token**\n 3. 
Make sure to save the **password**, as it will not be accessible again.""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Cofense Intelligence Threat Indicators data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Cofense API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cofense connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CofenseIntelligence-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tCofense BaseURL (https:///) \n\t\tCofense Username \n\t\tCofense Password \n\t\tAzure Client ID \n\t\tAzure Client Secret \n\t\tAzure Tenant ID \n\t\tAzure Resource Group Name \n\t\tAzure Workspace Name \n\t\tAzure Subscription ID \n\t\tRequireProxy \n\t\tProxy Username (optional) \n\t\tProxy Password (optional) \n\t\tProxy URL (optional) \n\t\tProxy Port (optional) \n\t\tLogLevel (optional) \n\t\tMalware_Data_Table_name\n\t\tSendCofenseIndicatorToDefender \n\t\tSchedule \n4. Click on **Review+Create**. \n5. 
Then after validation click on **Create** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cofense Intelligence Threat Indicators data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CofenseIntelligence-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CofenseXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tCofense BaseURL (https:///) \n\t\tCofense Username \n\t\tCofense Password \n\t\tAzure Client ID \n\t\tAzure Client Secret \n\t\tAzure Tenant ID \n\t\tAzure Resource Group Name \n\t\tAzure Workspace Name \n\t\tFunction App Name \n\t\tAzure Subscription ID \n\t\tRequireProxy \n\t\tProxy Username (optional) \n\t\tProxy Password (optional) \n\t\tProxy URL (optional) \n\t\tProxy Port (optional) \n\t\tLogLevel (optional) \n\t\tMalware_Data_Table_name\n\t\tSendCofenseIndicatorToDefender \n\t\tSchedule \n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Cofense Username** and **Password** is required. See the documentation to learn more about API on the [Rest API reference](https://www.threathq.com/docs/rest_api_reference.html)""}, {""name"": ""Microsoft Defender for Endpoints"", ""description"": ""**Microsoft Defender for Endpoints License** is required for SentinelToDefender function.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseIntelligence/Data%20Connectors/CofenseIntelligenceDataConnector/CofenseIntelligence_API_FunctionApp.json","true" -"","CofenseTriage","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseTriage","cofense","cofense-triage-sentinel","2023-03-24","2023-03-24","","Cofense Support","Partner","https://cofense.com/contact-support/","","domains","","","","","","","false","","false" -"Cofense_Triage_failed_indicators_CL","CofenseTriage","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseTriage","cofense","cofense-triage-sentinel","2023-03-24","2023-03-24","","Cofense Support","Partner","https://cofense.com/contact-support/","","domains","CofenseTriage","Cofense","Cofense Triage Threat Indicators 
Ingestion","The [Cofense-Triage](https://cofense.com/product-services/cofense-triage/) data connector provides the following capabilities:
1. CofenseBasedIndicatorCreator :
>* Get Threat Indicators from the Cofense Triage platform and create Threat Intelligence Indicators in Microsoft Sentinel.
> * Ingest Cofense Indicator ID and report links into custom logs table.
2. NonCofenseBasedIndicatorCreatorToCofense :
>* Get Non-Cofense Threat Intelligence Indicators from Microsoft Sentinel Threat Intelligence and create/update Indicators in Cofense Triage platform.
3. IndicatorCreatorToDefender :
>* Get Cofense Triage Threat Intelligence Indicators from Microsoft Sentinel Threat Intelligence and create/update Indicators in Microsoft Defender for Endpoints.
4. RetryFailedIndicators :
>* Get failed indicators from failed indicators files and retry creating/updating Threat Intelligence indicators in Microsoft Sentinel.


For more details of REST APIs refer to the below two documentations:
1. Cofense API documentation:
> https://`<your Cofense Triage instance>`/docs/api/v2/index.html
2. Microsoft Threat Intelligence Indicator documentation:
> https://learn.microsoft.com/rest/api/securityinsights/preview/threat-intelligence-indicator
3. Microsoft Defender for Endpoints Indicator documentation:
> https://learn.microsoft.com/microsoft-365/security/defender-endpoint/ti-indicator?view=o365-worldwide","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Cofense APIs to pull its Threat Indicators and create Threat Intelligence Indicators into Microsoft Sentinel Threat Intelligence and pulls Non-Cofense Threat Intelligence Indicators from Microsoft Sentinel and create/update Threat Indicators in Cofense. Likewise, it also creates/updates Cofense Based Threat Indicators in Microsoft Defender for Endpoints. All this might result in additional indicator and data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Microsoft Azure Active Directory Application**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new Azure Active Directory application:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Azure Active Directory**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. 
The client ID and Tenant ID is required as configuration parameters for the execution of CofenseTriage Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for Microsoft Azure Active Directory Application**\n\n Sometimes called an application password, a client secret is a string value required for the execution of CofenseTriage Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of CofenseTriage Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to Microsoft Azure Active Directory Application**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. 
Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Assign Defender Threat Indicator permissions to Microsoft Azure Active Directory Application**\n\n Follow the steps in this section to assign the permissions:\n 1. In the Azure portal, in **App registrations**, select **your application**.\n 2. To enable an app to access Defender for Endpoint indicators, assign it **'Ti.ReadWrite.All'** permission, on your application page, select **API Permissions > Add permission > APIs my organization uses >, type WindowsDefenderATP, and then select WindowsDefenderATP**.\n 3. Select **Application permissions > Ti.ReadWrite.All**, and then select **Add permissions**.\n 4. Select **Grant consent**. \n\n> **Reference link:** [https://docs.microsoft.com/microsoft-365/security/defender-endpoint/exposed-apis-create-app-webapp?view=o365-worldwide](https://docs.microsoft.com/microsoft-365/security/defender-endpoint/exposed-apis-create-app-webapp?view=o365-worldwide)""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create/get Credentials for the Cofense Triage account** \n\n Follow the steps in this section to create/get **Cofense Client ID** and **Client Secret**:\n 1. Go to **Administration > API Management > Version 2 tab > Applications**\n 2. Click on **New Application**\n 3. 
Add the required information and click on **submit**.""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Cofense Triage Threat Indicators data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Cofense API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cofense connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CofenseTriage-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tCofense URL (https:///) \n\t\tCofense Client ID \n\t\tCofense Client Secret \n\t\tAzure Client ID \n\t\tAzure Client Secret \n\t\tAzure Tenant ID \n\t\tAzure Resource Group Name \n\t\tAzure Workspace Name \n\t\tAzure Subscription ID \n\t\tThreat Level \n\t\tProxy Username (optional) \n\t\tProxy Password (optional) \n\t\tProxy URL (optional) \n\t\tProxy Port (optional) \n\t\tThrottle Limit for Non-Cofense Indicators (optional) \n\t\tLogLevel (optional) \n\t\tReports Table Name \n\t\tSchedule \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cofense Triage Threat Indicators data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CofenseThreatIndicatorsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CofenseXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tCofense URL (https:///) \n\t\tCofense Client ID \n\t\tCofense Client Secret \n\t\tAzure Client ID \n\t\tAzure Client Secret \n\t\tAzure Tenant ID \n\t\tAzure Resource Group Name \n\t\tAzure Workspace Name \n\t\tAzure Subscription ID \n\t\tThreat Level \n\t\tProxy Username (optional) \n\t\tProxy Password (optional) \n\t\tProxy URL (optional) \n\t\tProxy Port (optional) \n\t\tThrottle Limit for Non-Cofense Indicators (optional) \n\t\tLogLevel (optional) \n\t\tReports Table Name \n\t\tSchedule \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Cofense Client ID** and **Client Secret** is required. See the documentation to learn more about API on the `https:///docs/api/v2/index.html`""}, {""name"": ""Microsoft Defender for Endpoints"", ""description"": ""**Microsoft Defender for Endpoints License** is required for IndicatorCreatorToDefender function.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseTriage/Data%20Connectors/CofenseTriageDataConnector/CofenseTriage_API_FunctionApp.json","true" -"Report_links_data_CL","CofenseTriage","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseTriage","cofense","cofense-triage-sentinel","2023-03-24","2023-03-24","","Cofense Support","Partner","https://cofense.com/contact-support/","","domains","CofenseTriage","Cofense","Cofense Triage Threat Indicators Ingestion","The [Cofense-Triage](https://cofense.com/product-services/cofense-triage/) data connector provides the following capabilities:
1. CofenseBasedIndicatorCreator :
>* Get Threat Indicators from the Cofense Triage platform and create Threat Intelligence Indicators in Microsoft Sentinel.
> * Ingest Cofense Indicator ID and report links into custom logs table.
2. NonCofenseBasedIndicatorCreatorToCofense :
>* Get Non-Cofense Threat Intelligence Indicators from Microsoft Sentinel Threat Intelligence and create/update Indicators in Cofense Triage platform.
3. IndicatorCreatorToDefender :
>* Get Cofense Triage Threat Intelligence Indicators from Microsoft Sentinel Threat Intelligence and create/update Indicators in Microsoft Defender for Endpoints.
4. RetryFailedIndicators :
>* Get failed indicators from failed indicators files and retry creating/updating Threat Intelligence indicators in Microsoft Sentinel.


For more details of REST APIs refer to the below documentations:
1. Cofense API documentation:
> https://``/docs/api/v2/index.html
2. Microsoft Threat Intelligence Indicator documentation:
> https://learn.microsoft.com/rest/api/securityinsights/preview/threat-intelligence-indicator
3. Microsoft Defender for Endpoints Indicator documentation:
> https://learn.microsoft.com/microsoft-365/security/defender-endpoint/ti-indicator?view=o365-worldwide","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Cofense APIs to pull its Threat Indicators and create Threat Intelligence Indicators into Microsoft Sentinel Threat Intelligence and pulls Non-Cofense Threat Intelligence Indicators from Microsoft Sentinel and create/update Threat Indicators in Cofense. Likewise, it also creates/updates Cofense Based Threat Indicators in Microsoft Defender for Endpoints. All this might result in additional indicator and data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Microsoft Azure Active Directory Application**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new Azure Active Directory application:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Azure Active Directory**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. 
The client ID and Tenant ID is required as configuration parameters for the execution of CofenseTriage Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for Microsoft Azure Active Directory Application**\n\n Sometimes called an application password, a client secret is a string value required for the execution of CofenseTriage Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of CofenseTriage Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to Microsoft Azure Active Directory Application**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. 
Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Assign Defender Threat Indicator permissions to Microsoft Azure Active Directory Application**\n\n Follow the steps in this section to assign the permissions:\n 1. In the Azure portal, in **App registrations**, select **your application**.\n 2. To enable an app to access Defender for Endpoint indicators, assign it **'Ti.ReadWrite.All'** permission, on your application page, select **API Permissions > Add permission > APIs my organization uses >, type WindowsDefenderATP, and then select WindowsDefenderATP**.\n 3. Select **Application permissions > Ti.ReadWrite.All**, and then select **Add permissions**.\n 4. Select **Grant consent**. \n\n> **Reference link:** [https://docs.microsoft.com/microsoft-365/security/defender-endpoint/exposed-apis-create-app-webapp?view=o365-worldwide](https://docs.microsoft.com/microsoft-365/security/defender-endpoint/exposed-apis-create-app-webapp?view=o365-worldwide)""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create/get Credentials for the Cofense Triage account** \n\n Follow the steps in this section to create/get **Cofense Client ID** and **Client Secret**:\n 1. Go to **Administration > API Management > Version 2 tab > Applications**\n 2. Click on **New Application**\n 3. 
Add the required information and click on **submit**.""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Cofense Triage Threat Indicators data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Cofense API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cofense connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CofenseTriage-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tCofense URL (https:///) \n\t\tCofense Client ID \n\t\tCofense Client Secret \n\t\tAzure Client ID \n\t\tAzure Client Secret \n\t\tAzure Tenant ID \n\t\tAzure Resource Group Name \n\t\tAzure Workspace Name \n\t\tAzure Subscription ID \n\t\tThreat Level \n\t\tProxy Username (optional) \n\t\tProxy Password (optional) \n\t\tProxy URL (optional) \n\t\tProxy Port (optional) \n\t\tThrottle Limit for Non-Cofense Indicators (optional) \n\t\tLogLevel (optional) \n\t\tReports Table Name \n\t\tSchedule \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cofense Triage Threat Indicators data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CofenseThreatIndicatorsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CofenseXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tCofense URL (https:///) \n\t\tCofense Client ID \n\t\tCofense Client Secret \n\t\tAzure Client ID \n\t\tAzure Client Secret \n\t\tAzure Tenant ID \n\t\tAzure Resource Group Name \n\t\tAzure Workspace Name \n\t\tAzure Subscription ID \n\t\tThreat Level \n\t\tProxy Username (optional) \n\t\tProxy Password (optional) \n\t\tProxy URL (optional) \n\t\tProxy Port (optional) \n\t\tThrottle Limit for Non-Cofense Indicators (optional) \n\t\tLogLevel (optional) \n\t\tReports Table Name \n\t\tSchedule \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Cofense Client ID** and **Client Secret** is required. See the documentation to learn more about API on the `https:///docs/api/v2/index.html`""}, {""name"": ""Microsoft Defender for Endpoints"", ""description"": ""**Microsoft Defender for Endpoints License** is required for IndicatorCreatorToDefender function.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseTriage/Data%20Connectors/CofenseTriageDataConnector/CofenseTriage_API_FunctionApp.json","true" -"ThreatIntelligenceIndicator","CofenseTriage","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseTriage","cofense","cofense-triage-sentinel","2023-03-24","2023-03-24","","Cofense Support","Partner","https://cofense.com/contact-support/","","domains","CofenseTriage","Cofense","Cofense Triage Threat Indicators Ingestion","The [Cofense-Triage](https://cofense.com/product-services/cofense-triage/) data connector provides the following capabilities:
1. CofenseBasedIndicatorCreator :
>* Get Threat Indicators from the Cofense Triage platform and create Threat Intelligence Indicators in Microsoft Sentinel.
> * Ingest Cofense Indicator ID and report links into custom logs table.
2. NonCofenseBasedIndicatorCreatorToCofense :
>* Get Non-Cofense Threat Intelligence Indicators from Microsoft Sentinel Threat Intelligence and create/update Indicators in Cofense Triage platform.
3. IndicatorCreatorToDefender :
>* Get Cofense Triage Threat Intelligence Indicators from Microsoft Sentinel Threat Intelligence and create/update Indicators in Microsoft Defender for Endpoints.
4. RetryFailedIndicators :
>* Get failed indicators from failed indicators files and retry creating/updating Threat Intelligence indicators in Microsoft Sentinel.


For more details of REST APIs refer to the below documentations:
1. Cofense API documentation:
> https://``/docs/api/v2/index.html
2. Microsoft Threat Intelligence Indicator documentation:
> https://learn.microsoft.com/rest/api/securityinsights/preview/threat-intelligence-indicator
3. Microsoft Defender for Endpoints Indicator documentation:
> https://learn.microsoft.com/microsoft-365/security/defender-endpoint/ti-indicator?view=o365-worldwide","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Cofense APIs to pull its Threat Indicators and create Threat Intelligence Indicators into Microsoft Sentinel Threat Intelligence and pulls Non-Cofense Threat Intelligence Indicators from Microsoft Sentinel and create/update Threat Indicators in Cofense. Likewise, it also creates/updates Cofense Based Threat Indicators in Microsoft Defender for Endpoints. All this might result in additional indicator and data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Microsoft Azure Active Directory Application**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new Azure Active Directory application:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Azure Active Directory**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. 
The client ID and Tenant ID is required as configuration parameters for the execution of CofenseTriage Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for Microsoft Azure Active Directory Application**\n\n Sometimes called an application password, a client secret is a string value required for the execution of CofenseTriage Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of CofenseTriage Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to Microsoft Azure Active Directory Application**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. 
Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Assign Defender Threat Indicator permissions to Microsoft Azure Active Directory Application**\n\n Follow the steps in this section to assign the permissions:\n 1. In the Azure portal, in **App registrations**, select **your application**.\n 2. To enable an app to access Defender for Endpoint indicators, assign it **'Ti.ReadWrite.All'** permission, on your application page, select **API Permissions > Add permission > APIs my organization uses >, type WindowsDefenderATP, and then select WindowsDefenderATP**.\n 3. Select **Application permissions > Ti.ReadWrite.All**, and then select **Add permissions**.\n 4. Select **Grant consent**. \n\n> **Reference link:** [https://docs.microsoft.com/microsoft-365/security/defender-endpoint/exposed-apis-create-app-webapp?view=o365-worldwide](https://docs.microsoft.com/microsoft-365/security/defender-endpoint/exposed-apis-create-app-webapp?view=o365-worldwide)""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create/get Credentials for the Cofense Triage account** \n\n Follow the steps in this section to create/get **Cofense Client ID** and **Client Secret**:\n 1. Go to **Administration > API Management > Version 2 tab > Applications**\n 2. Click on **New Application**\n 3. 
Add the required information and click on **submit**.""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Cofense Triage Threat Indicators data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Cofense API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cofense connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CofenseTriage-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tCofense URL (https:///) \n\t\tCofense Client ID \n\t\tCofense Client Secret \n\t\tAzure Client ID \n\t\tAzure Client Secret \n\t\tAzure Tenant ID \n\t\tAzure Resource Group Name \n\t\tAzure Workspace Name \n\t\tAzure Subscription ID \n\t\tThreat Level \n\t\tProxy Username (optional) \n\t\tProxy Password (optional) \n\t\tProxy URL (optional) \n\t\tProxy Port (optional) \n\t\tThrottle Limit for Non-Cofense Indicators (optional) \n\t\tLogLevel (optional) \n\t\tReports Table Name \n\t\tSchedule \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cofense Triage Threat Indicators data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CofenseThreatIndicatorsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CofenseXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tCofense URL (https:///) \n\t\tCofense Client ID \n\t\tCofense Client Secret \n\t\tAzure Client ID \n\t\tAzure Client Secret \n\t\tAzure Tenant ID \n\t\tAzure Resource Group Name \n\t\tAzure Workspace Name \n\t\tAzure Subscription ID \n\t\tThreat Level \n\t\tProxy Username (optional) \n\t\tProxy Password (optional) \n\t\tProxy URL (optional) \n\t\tProxy Port (optional) \n\t\tThrottle Limit for Non-Cofense Indicators (optional) \n\t\tLogLevel (optional) \n\t\tReports Table Name \n\t\tSchedule \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Cofense Client ID** and **Client Secret** is required. See the documentation to learn more about API on the `https:///docs/api/v2/index.html`""}, {""name"": ""Microsoft Defender for Endpoints"", ""description"": ""**Microsoft Defender for Endpoints License** is required for IndicatorCreatorToDefender function.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseTriage/Data%20Connectors/CofenseTriageDataConnector/CofenseTriage_API_FunctionApp.json","true" -"","Cognni","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cognni","shieldox","cognni_for_microsoft_sentinel","2022-05-06","","","Cognni","Partner","https://cognni.ai/contact-support/","","domains","","","","","","","false","","false" -"CognniIncidents_CL","Cognni","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cognni","shieldox","cognni_for_microsoft_sentinel","2022-05-06","","","Cognni","Partner","https://cognni.ai/contact-support/","","domains","CognniSentinelDataConnector","Cognni","Cognni","The Cognni connector offers a quick and simple integration with Microsoft Sentinel. 
You can use Cognni to autonomously map your previously unclassified important information and detect related incidents. This allows you to recognize risks to your important information, understand the severity of the incidents, and investigate the details you need to remediate, fast enough to make a difference.","[{""title"": ""Connect to Cognni"", ""description"": ""1. Go to [Cognni integrations page](https://intelligence.cognni.ai/integrations)\n2. Click **'Connect'** on the 'Microsoft Sentinel' box\n3. Copy and paste **'workspaceId'** and **'sharedKey'** (from below) to the related fields on Cognni's integrations screen\n4. Click the **'Connect'** botton to complete the configuration. \n Soon, all your Cognni-detected incidents will be forwarded here (into Microsoft Sentinel)\n\nNot a Cognni user? [Join us](https://azuremarketplace.microsoft.com/en-us/marketplace/apps/shieldox.appsource_freetrial)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Shared Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cognni/Data%20Connectors/CognniSentinelConnector.json","true" -"","CognyteLuminar","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CognyteLuminar","cognytetechnologiesisraelltd","microsoft-sentinel-solution-cognyte-luminar","2023-09-15","","","Cognyte Luminar","Partner","https://www.cognyte.com/contact/","","domains","","","","","","","false","","false" -"ThreatIntelligenceIndicator","CognyteLuminar","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CognyteLuminar","cognytetechnologiesisraelltd","microsoft-sentinel-solution-cognyte-luminar","2023-09-15","","","Cognyte Luminar","Partner","https://www.cognyte.com/contact/","","domains","CognyteLuminar","Cognyte Technologies Israel Ltd","Luminar IOCs and Leaked Credentials","Luminar IOCs and Leaked Credentials connector allows integration of intelligence-based IOC data and customer-related leaked records identified by Luminar.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Cognyte Luminar API to pull Luminar IOCs and Leaked Credentials into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template for Flex Consumption Plan"", ""description"": ""Use this method for automated deployment of the data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CognyteLuminar-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Application ID**, **Tenant ID**,**Client Secret**, **Luminar API Client ID**, **Luminar API Account ID**, **Luminar API Client Secret**, **Luminar Initial Fetch Date**, **TimeInterval** and deploy.\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template for Premium Plan"", ""description"": ""Use this method for automated deployment of the data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CognyteLuminar-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Application ID**, **Tenant ID**,**Client Secret**, **Luminar API Client ID**, **Luminar API Account ID**, **Luminar API Client Secret**, **Luminar Initial Fetch Date**, **TimeInterval** and deploy.\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cognyte Luminar data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> NOTE:You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CognyteLuminar-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CognyteLuminarXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\\n\\n1. In the Function App, select the Function App Name and select **Configuration**.\\n2. In the **Application settings** tab, select **+ New application setting**.\\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \\n\\tApplication ID\\n\\tTenant ID\\n\\tClient Secret\\n\\tLuminar API Client ID\\n\\tLuminar API Account ID\\n\\tLuminar API Client Secret\\n\\tLuminar Initial Fetch Date\\n\\tTimeInterval - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`\\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Luminar Client ID**, **Luminar Client Secret** and **Luminar Account ID** are required.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CognyteLuminar/Data%20Connectors/CognyteLuminar_FunctionApp.json","true" -"","CohesitySecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CohesitySecurity","cohesitydev1592001764720","cohesity_sentinel_data_connector","2022-10-10","","","Cohesity","Partner","https://support.cohesity.com/","","domains","","","","","","","false","","false" -"Cohesity_CL","CohesitySecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CohesitySecurity","cohesitydev1592001764720","cohesity_sentinel_data_connector","2022-10-10","","","Cohesity","Partner","https://support.cohesity.com/","","domains","CohesityDataConnector","Cohesity","Cohesity","The Cohesity function apps provide the ability to ingest Cohesity Datahawk ransomware alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions that connect to the Azure Blob Storage and KeyVault. This might result in additional costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/), [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) and [Azure KeyVault pricing page](https://azure.microsoft.com/pricing/details/key-vault/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Get a Cohesity DataHawk API key (see troubleshooting [instruction 1](https://github.com/Azure/Azure-Sentinel/tree/master/Solutions/CohesitySecurity/Data%20Connectors/Helios2Sentinel/IncidentProducer))**""}, {""title"": """", ""description"": ""**STEP 2 - Register Azure app ([link](https://portal.azure.com/#view/Microsoft_AAD_IAM/ActiveDirectoryMenuBlade/~/RegisteredApps)) and save Application (client) ID, Directory (tenant) ID, and Secret Value ([instructions](https://learn.microsoft.com/en-us/azure/healthcare-apis/register-application)). Grant it Azure Storage (user_impersonation) permission. Also, assign the 'Microsoft Sentinel Contributor' role to the application in the appropriate subscription.**""}, {""title"": """", ""description"": ""**STEP 3 - Deploy the connector and the associated Azure Functions**.""}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cohesity data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-Cohesity-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the parameters that you created at the previous steps\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Azure Blob Storage connection string and container name"", ""description"": ""Azure Blob Storage connection string and container name""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CohesitySecurity/Data%20Connectors/Helios2Sentinel/Cohesity_API_FunctionApp.json","true" -"","Common Event Format","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Common%20Event%20Format","azuresentinel","azure-sentinel-solution-commoneventformat","2022-05-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"CommonSecurityLog","Common Event Format","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Common%20Event%20Format","azuresentinel","azure-sentinel-solution-commoneventformat","2022-05-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CEF","Any","Common Event Format (CEF)","Common Event Format (CEF) is an industry standard format on top of Syslog messages, used by many security vendors to allow event interoperability among different platforms. By connecting your CEF logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2223902&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python --version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine\u2019s IP address.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python --version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Common%20Event%20Format/Data%20Connectors/CEF.JSON","true" -"CommonSecurityLog","Common Event Format","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Common%20Event%20Format","azuresentinel","azure-sentinel-solution-commoneventformat","2022-05-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CefAma","Microsoft","Common Event Format (CEF) via AMA","Common Event Format (CEF) is an industry standard format on top of Syslog messages, used by many security vendors to allow event interoperability among different platforms. By connecting your CEF logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2223547&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Enable data collection rule\u200b"", ""description"": ""> CEF Events logs are collected only from **Linux** agents."", ""instructions"": [{""type"": ""CefAma""}]}, {""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 3}, ""type"": ""InstallAgent""}]}, {""title"": ""Run the following command to install and apply the CEF collector:"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces/datasources"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace data sources"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Common%20Event%20Format/Data%20Connectors/CEF%20AMA.JSON","true" -"","Commvault Security IQ","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Commvault%20Security%20IQ","commvault","microsoft-sentinel-solution-commvaultsecurityiq","2023-08-17","","","Commvault","Partner","https://www.commvault.com/support","","domains","","","","","","","false","","false" -"CommvaultSecurityIQ_CL","Commvault Security IQ","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Commvault%20Security%20IQ","commvault","microsoft-sentinel-solution-commvaultsecurityiq","2023-08-17","","","Commvault","Partner","https://www.commvault.com/support","","domains","CommvaultSecurityIQ_CL","Commvault","CommvaultSecurityIQ","This Azure Function enables Commvault users to ingest alerts/events into their Microsoft Sentinel instance. With Analytic Rules,Microsoft Sentinel can automatically create Microsoft Sentinel incidents from incoming events and logs.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Commvault Instance to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Commvault QSDK Token**\n\n[Follow these instructions](https://documentation.commvault.com/2024e/essential/creating_access_token.html) to create an API Token.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the CommvaultSecurityIQ data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Commvault Endpoint URL and QSDK Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""**Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the Commvault Security IQ data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CommvaultSecurityIQ-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the **Workspace ID**, **Workspace Key** 'and/or Other required fields' and click Next. \n4. 
Click **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Commvault Environment Endpoint URL"", ""description"": ""Make sure to follow the documentation and set the secret value in KeyVault""}, {""name"": ""Commvault QSDK Token"", ""description"": ""Make sure to follow the documentation and set the secret value in KeyVault""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Commvault%20Security%20IQ/Data%20Connectors/CommvaultSecurityIQ_API_AzureFunctionApp.json","true" -"","ContinuousDiagnostics&Mitigation","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ContinuousDiagnostics%26Mitigation","azuresentinel","azure-sentinel-solution-continuousdiagnostics","2022-08-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"","Contrast 
Protect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Contrast%20Protect","contrast_security","contrast_protect_azure_sentinel_solution","2021-10-20","","","Contrast Protect","Partner","https://docs.contrastsecurity.com/","","domains","","","","","","","false","","false" -"CommonSecurityLog","Contrast Protect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Contrast%20Protect","contrast_security","contrast_protect_azure_sentinel_solution","2021-10-20","","","Contrast Protect","Partner","https://docs.contrastsecurity.com/","","domains","ContrastProtect","Contrast Security","[Deprecated] Contrast Protect via Legacy Agent","Contrast Protect mitigates security threats in production applications with runtime protection and observability. Attack event results (blocked, probed, suspicious...) and other information can be sent to Microsoft Sentinel to blend with security information from other systems.","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. 
You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Configure the Contrast Protect agent to forward events to syslog as described here: https://docs.contrastsecurity.com/en/output-to-syslog.html. Generate some attack events for your application.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Contrast%20Protect/Data%20Connectors/ContrastProtect.json","true" -"CommonSecurityLog","Contrast Protect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Contrast%20Protect","contrast_security","contrast_protect_azure_sentinel_solution","2021-10-20","","","Contrast Protect","Partner","https://docs.contrastsecurity.com/","","domains","ContrastProtectAma","Contrast Security","[Deprecated] Contrast Protect via AMA","Contrast Protect mitigates security threats in production applications with runtime protection and observability. Attack event results (blocked, probed, suspicious...) 
and other information can be sent to Microsoft Sentinel to blend with security information from other systems.","[{""title"": """", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Configure the Contrast Protect agent to forward events to syslog as described here: https://docs.contrastsecurity.com/en/output-to-syslog.html. Generate some attack events for your application."", ""instructions"": []}, {""title"": ""Step C. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Contrast%20Protect/Data%20Connectors/template_ContrastProtectAMA.json","true" -"","ContrastADR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ContrastADR","contrast_security","contrast_adr_azure_sentinel_solution","2025-01-18","2025-01-18","","Contrast Security","Partner","https://support.contrastsecurity.com/hc/en-us","","domains","","","","","","","false","","false" -"ContrastADRIncident_CL","ContrastADR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ContrastADR","contrast_security","contrast_adr_azure_sentinel_solution","2025-01-18","2025-01-18","","Contrast Security","Partner","https://support.contrastsecurity.com/hc/en-us","","domains","ContrastADR","Contrast Security","ContrastADR","The ContrastADR data connector provides the capability to ingest Contrast ADR attack events into Microsoft Sentinel using the ContrastADR Webhook. 
ContrastADR data connector can enrich the incoming webhook data with ContrastADR API enrichment calls.","[{""title"": """", ""description"": ""Use these Workspace id and primakey key as shared key in azure function app"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method to automate deployment of the ContrastADR Data Connector using ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ContrastADR-azuredeploy)\n2. Provide the following parameters: Region, Function Name, LOG_ANALYTICS_SHARED_KEY, LOG_ANALYTICS_WORKSPACE_ID ""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ContrastADR/Data%20Connectors/ContrastADR_API_FunctionApp.json","true" -"ContrastADR_CL","ContrastADR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ContrastADR","contrast_security","contrast_adr_azure_sentinel_solution","2025-01-18","2025-01-18","","Contrast Security","Partner","https://support.contrastsecurity.com/hc/en-us","","domains","ContrastADR","Contrast Security","ContrastADR","The ContrastADR data connector provides the capability to ingest Contrast ADR attack events into Microsoft Sentinel using the ContrastADR Webhook. ContrastADR data connector can enrich the incoming webhook data with ContrastADR API enrichment calls.","[{""title"": """", ""description"": ""Use these Workspace id and primakey key as shared key in azure function app"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method to automate deployment of the ContrastADR Data Connector using ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ContrastADR-azuredeploy)\n2. 
Provide the following parameters: Region, Function Name, LOG_ANALYTICS_SHARED_KEY, LOG_ANALYTICS_WORKSPACE_ID ""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ContrastADR/Data%20Connectors/ContrastADR_API_FunctionApp.json","true" -"","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","","","","","","","false","","false" -"Corelight_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. 
Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_bacnet_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_capture_loss_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_cip_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_conn_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_conn_long_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_conn_red_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_corelight_burst_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_corelight_overall_capture_loss_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_corelight_profiling_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_datared_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_dce_rpc_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_dga_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_dhcp_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_dnp3_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_dns_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_dns_red_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_dpd_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_encrypted_dns_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_enip_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_enip_debug_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_enip_list_identity_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_etc_viz_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_files_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_files_red_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_ftp_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_generic_dns_tunnels_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_generic_icmp_tunnels_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_http2_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_http_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_http_red_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_icmp_specific_tunnels_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_intel_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_ipsec_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_irc_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_iso_cotp_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_kerberos_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_known_certs_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_known_devices_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_known_domains_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_known_hosts_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_known_names_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_known_remotes_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_known_services_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_known_users_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_local_subnets_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_local_subnets_dj_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_local_subnets_graphs_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_log4shell_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_modbus_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_mqtt_connect_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_mqtt_publish_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_mqtt_subscribe_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_mysql_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_notice_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_ntlm_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_ntp_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_ocsp_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_openflow_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_packet_filter_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_pe_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_profinet_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_profinet_dce_rpc_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_profinet_debug_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_radius_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_rdp_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_reporter_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_rfb_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_s7comm_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_signatures_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_sip_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_smartpcap_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_smartpcap_stats_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_smb_files_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_smb_mapping_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_smtp_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_smtp_links_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_snmp_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_socks_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_software_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_specific_dns_tunnels_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_ssh_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_ssl_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_ssl_red_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_stats_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_stepping_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_stun_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_stun_nat_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_suricata_corelight_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_suricata_eve_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_suricata_stats_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_suricata_zeek_stats_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_syslog_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_tds_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_tds_rpc_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_tds_sql_batch_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_traceroute_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_tunnel_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_unknown_smartpcap_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_util_stats_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_vpn_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_weird_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_weird_red_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_weird_stats_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_wireguard_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_x509_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_x509_red_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"Corelight_v2_zeek_doctor_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" -"","Cortex XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cortex%20XDR","defendlimited1682894612656","cortex_xdr_connector","2023-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"PaloAltoCortexXDR_Alerts_CL","Cortex XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cortex%20XDR","defendlimited1682894612656","cortex_xdr_connector","2023-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CortexXDRDataConnector","Microsoft","Palo Alto Cortex XDR","The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows ingesting logs from the Palo Alto Cortex XDR API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Palo Alto Cortex XDR API \n Follow the instructions to obtain the credentials. 
you can also follow this [guide](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/3u3j0e7hcx8t1-get-started-with-cortex-xdr-ap-is) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\n 1.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 1.3. Under [**Integrations**] click on [**API Keys**].\n 1.4. In the [**Settings**] Page click on [**Copy API URL**] in the top right corner.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 2.3. Under [**Integrations**] click on [**API Keys**].\n 2.4. In the [**Settings**] Page click on [**New Key**] in the top right corner.\n 2.5. Choose security level, role, choose Standard and click on [**Generate**]\n 2.6. 
Copy the API Token; once it is generated, the [**API Token ID**] can be found under the ID column""}}, {""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api-example.xdr.au.paloaltonetworks.com"", ""type"": ""text"", ""name"": ""apiUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Key ID"", ""placeholder"": ""API ID"", ""type"": ""text"", ""name"": ""apiId""}, ""type"": ""Textbox""}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""apiToken""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cortex%20XDR/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true"
It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Palo Alto Cortex XDR API \n Follow the instructions to obtain the credentials. you can also follow this [guide](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/3u3j0e7hcx8t1-get-started-with-cortex-xdr-ap-is) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\n 1.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 1.3. Under [**Integrations**] click on [**API Keys**].\n 1.4. In the [**Settings**] Page click on [**Copy API URL**] in the top right corner.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 2.3. Under [**Integrations**] click on [**API Keys**].\n 2.4. In the [**Settings**] Page click on [**New Key**] in the top right corner.\n 2.5. Choose security level, role, choose Standard and click on [**Generate**]\n 2.6. 
Copy the API Token; once it is generated, the [**API Token ID**] can be found under the ID column""}}, {""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api-example.xdr.au.paloaltonetworks.com"", ""type"": ""text"", ""name"": ""apiUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Key ID"", ""placeholder"": ""API ID"", ""type"": ""text"", ""name"": ""apiId""}, ""type"": ""Textbox""}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""apiToken""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cortex%20XDR/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true"
It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Palo Alto Cortex XDR API \n Follow the instructions to obtain the credentials. you can also follow this [guide](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/3u3j0e7hcx8t1-get-started-with-cortex-xdr-ap-is) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\n 1.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 1.3. Under [**Integrations**] click on [**API Keys**].\n 1.4. In the [**Settings**] Page click on [**Copy API URL**] in the top right corner.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 2.3. Under [**Integrations**] click on [**API Keys**].\n 2.4. In the [**Settings**] Page click on [**New Key**] in the top right corner.\n 2.5. Choose security level, role, choose Standard and click on [**Generate**]\n 2.6. 
Copy the API Token; once it is generated, the [**API Token ID**] can be found under the ID column""}}, {""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api-example.xdr.au.paloaltonetworks.com"", ""type"": ""text"", ""name"": ""apiUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Key ID"", ""placeholder"": ""API ID"", ""type"": ""text"", ""name"": ""apiId""}, ""type"": ""Textbox""}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""apiToken""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cortex%20XDR/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true"
It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Palo Alto Cortex XDR API \n Follow the instructions to obtain the credentials. you can also follow this [guide](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/3u3j0e7hcx8t1-get-started-with-cortex-xdr-ap-is) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\n 1.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 1.3. Under [**Integrations**] click on [**API Keys**].\n 1.4. In the [**Settings**] Page click on [**Copy API URL**] in the top right corner.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 2.3. Under [**Integrations**] click on [**API Keys**].\n 2.4. In the [**Settings**] Page click on [**New Key**] in the top right corner.\n 2.5. Choose security level, role, choose Standard and click on [**Generate**]\n 2.6. 
Copy the API Token; once it is generated, the [**API Token ID**] can be found under the ID column""}}, {""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api-example.xdr.au.paloaltonetworks.com"", ""type"": ""text"", ""name"": ""apiUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Key ID"", ""placeholder"": ""API ID"", ""type"": ""text"", ""name"": ""apiId""}, ""type"": ""Textbox""}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""apiToken""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cortex%20XDR/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true"
It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Palo Alto Cortex XDR API \n Follow the instructions to obtain the credentials. you can also follow this [guide](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/3u3j0e7hcx8t1-get-started-with-cortex-xdr-ap-is) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\n 1.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 1.3. Under [**Integrations**] click on [**API Keys**].\n 1.4. In the [**Settings**] Page click on [**Copy API URL**] in the top right corner.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 2.3. Under [**Integrations**] click on [**API Keys**].\n 2.4. In the [**Settings**] Page click on [**New Key**] in the top right corner.\n 2.5. Choose security level, role, choose Standard and click on [**Generate**]\n 2.6. 
Copy the API Token; once it is generated, the [**API Token ID**] can be found under the ID column""}}, {""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api-example.xdr.au.paloaltonetworks.com"", ""type"": ""text"", ""name"": ""apiUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Key ID"", ""placeholder"": ""API ID"", ""type"": ""text"", ""name"": ""apiId""}, ""type"": ""Textbox""}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""apiToken""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cortex%20XDR/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true"
""placeHolderName"": ""{{fqdn}}"", ""requestObjectKey"": ""apiEndpoint""}, {""displayText"": ""API Key Id"", ""placeHolderName"": ""{{apiKeyId}}"", ""pollingKeyPaths"": [""$.request.headers.x-xdr-auth-id""]}]}, ""type"": ""APIKey""}], ""title"": ""Enable Cortex XDR API""}]","{""customs"": [{""description"": ""**Cortex API Token** is required for REST API. [See the documentation to learn more about API](https://docs.paloaltonetworks.com/cortex/cortex-xdr/cortex-xdr-api.html). Check all requirements and follow the instructions for obtaining credentials."", ""name"": ""Cortex API credentials""}], ""resourceProvider"": [{""permissionsDisplayText"": ""read and write permissions are required."", ""provider"": ""Microsoft.OperationalInsights/workspaces"", ""providerDisplayName"": ""Workspace"", ""requiredPermissions"": {""delete"": true, ""read"": true, ""write"": true}, ""scope"": ""Workspace""}, {""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""providerDisplayName"": ""Keys"", ""requiredPermissions"": {""action"": true}, ""scope"": ""Workspace""}]}","true","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cortex%20XDR/Data%20Connectors/CortexXDR_DataConnector.json","true" -"","Cribl","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cribl","criblinc1673975616879","microsoft-sentinel-solution-cribl","2024-08-01","2024-09-05","","Cribl","Partner","https://www.cribl.io/support/","","domains","","","","","","","false","","false" 
-"CriblAccess_CL","Cribl","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cribl","criblinc1673975616879","microsoft-sentinel-solution-cribl","2024-08-01","2024-09-05","","Cribl","Partner","https://www.cribl.io/support/","","domains","Cribl","Cribl","Cribl","The [Cribl](https://cribl.io/accelerate-cloud-migration/) connector allows you to easily connect your Cribl (Cribl Enterprise Edition - Standalone) logs with Microsoft Sentinel. This gives you more security insight into your organization's data pipelines.","[{""title"": ""Installation and setup instructions for Cribl Stream for Microsoft Sentinel"", ""description"": ""Use the documentation from this Github repository and configure Cribl Stream using \n\nhttps://docs.cribl.io/stream/usecase-azure-workspace/""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cribl/Data%20Connectors/Connector_Cribl.json","true" -"CriblAudit_CL","Cribl","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cribl","criblinc1673975616879","microsoft-sentinel-solution-cribl","2024-08-01","2024-09-05","","Cribl","Partner","https://www.cribl.io/support/","","domains","Cribl","Cribl","Cribl","The [Cribl](https://cribl.io/accelerate-cloud-migration/) connector allows you to easily connect your Cribl (Cribl Enterprise Edition - Standalone) logs with Microsoft Sentinel. This gives you more security insight into your organization's data pipelines.","[{""title"": ""Installation and setup instructions for Cribl Stream for Microsoft Sentinel"", ""description"": ""Use the documentation from this Github repository and configure Cribl Stream using \n\nhttps://docs.cribl.io/stream/usecase-azure-workspace/""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cribl/Data%20Connectors/Connector_Cribl.json","true" -"CriblInternal_CL","Cribl","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cribl","criblinc1673975616879","microsoft-sentinel-solution-cribl","2024-08-01","2024-09-05","","Cribl","Partner","https://www.cribl.io/support/","","domains","Cribl","Cribl","Cribl","The [Cribl](https://cribl.io/accelerate-cloud-migration/) connector allows you to easily connect your Cribl (Cribl Enterprise Edition - Standalone) logs with Microsoft Sentinel. This gives you more security insight into your organization's data pipelines.","[{""title"": ""Installation and setup instructions for Cribl Stream for Microsoft Sentinel"", ""description"": ""Use the documentation from this Github repository and configure Cribl Stream using \n\nhttps://docs.cribl.io/stream/usecase-azure-workspace/""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cribl/Data%20Connectors/Connector_Cribl.json","true" -"CriblUIAccess_CL","Cribl","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cribl","criblinc1673975616879","microsoft-sentinel-solution-cribl","2024-08-01","2024-09-05","","Cribl","Partner","https://www.cribl.io/support/","","domains","Cribl","Cribl","Cribl","The [Cribl](https://cribl.io/accelerate-cloud-migration/) connector allows you to easily connect your Cribl (Cribl Enterprise Edition - Standalone) logs with Microsoft Sentinel. This gives you more security insight into your organization's data pipelines.","[{""title"": ""Installation and setup instructions for Cribl Stream for Microsoft Sentinel"", ""description"": ""Use the documentation from this Github repository and configure Cribl Stream using \n\nhttps://docs.cribl.io/stream/usecase-azure-workspace/""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cribl/Data%20Connectors/Connector_Cribl.json","true" -"","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"CrowdStrikeAlerts","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeAPICCPDefinition","Microsoft","CrowdStrike API Data Connector (via Codeless Connector Framework)","The [CrowdStrike Data Connector](https://www.crowdstrike.com/) allows ingesting logs from the CrowdStrike API into Microsoft Sentinel. This connector is built on the Microsoft Sentinel Codeless Connector Platform and uses the CrowdStrike API to fetch logs for Alerts, Detections, Hosts, Incidents, and Vulnerabilities. It supports DCR-based ingestion time transformations so that queries can run more efficiently.","[{""title"": ""Configuration steps for the CrowdStrike API"", ""description"": ""Follow the instructions below to obtain your CrowdStrike API credentials. 
Click [here](https://github.com/Azure/Azure-Sentinel/tree/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeAPI_ccp#crowdstrike-falcon--api-data-connector-ccp-framework) for full details."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\nLog in to your CrowdStrike Console and navigate to the API section to copy your Base API URL.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve Client Credentials\nObtain your Client ID and Client Secret from the API credentials section in your CrowdStrike account.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api.us-2.crowdstrike.com"", ""type"": ""text"", ""name"": ""apiUrl"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client ID"", ""placeholder"": ""Your Client ID"", ""type"": ""text"", ""name"": ""clientId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client Secret"", ""placeholder"": ""Your Client Secret"", ""type"": ""password"", ""name"": ""clientSecret"", ""validations"": {""required"": true}}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""disconnectLabel"": ""Disconnect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeAPI_ccp/CrowdStrikeAPI_Definition.json","true" -"CrowdStrikeDetections","CrowdStrike Falcon Endpoint 
Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeAPICCPDefinition","Microsoft","CrowdStrike API Data Connector (via Codeless Connector Framework)","The [CrowdStrike Data Connector](https://www.crowdstrike.com/) allows ingesting logs from the CrowdStrike API into Microsoft Sentinel. This connector is built on the Microsoft Sentinel Codeless Connector Platform and uses the CrowdStrike API to fetch logs for Alerts, Detections, Hosts, Incidents, and Vulnerabilities. It supports DCR-based ingestion time transformations so that queries can run more efficiently.","[{""title"": ""Configuration steps for the CrowdStrike API"", ""description"": ""Follow the instructions below to obtain your CrowdStrike API credentials. Click [here](https://github.com/Azure/Azure-Sentinel/tree/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeAPI_ccp#crowdstrike-falcon--api-data-connector-ccp-framework) for full details."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\nLog in to your CrowdStrike Console and navigate to the API section to copy your Base API URL.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Retrieve Client Credentials\nObtain your Client ID and Client Secret from the API credentials section in your CrowdStrike account.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api.us-2.crowdstrike.com"", ""type"": ""text"", ""name"": ""apiUrl"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client ID"", ""placeholder"": ""Your Client ID"", ""type"": ""text"", ""name"": ""clientId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client Secret"", ""placeholder"": ""Your Client Secret"", ""type"": ""password"", ""name"": ""clientSecret"", ""validations"": {""required"": true}}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""disconnectLabel"": ""Disconnect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeAPI_ccp/CrowdStrikeAPI_Definition.json","true" -"CrowdStrikeHosts","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeAPICCPDefinition","Microsoft","CrowdStrike API Data Connector (via Codeless Connector Framework)","The [CrowdStrike Data Connector](https://www.crowdstrike.com/) allows ingesting logs from the CrowdStrike API into Microsoft Sentinel. 
This connector is built on the Microsoft Sentinel Codeless Connector Platform and uses the CrowdStrike API to fetch logs for Alerts, Detections, Hosts, Incidents, and Vulnerabilities. It supports DCR-based ingestion time transformations so that queries can run more efficiently.","[{""title"": ""Configuration steps for the CrowdStrike API"", ""description"": ""Follow the instructions below to obtain your CrowdStrike API credentials. Click [here](https://github.com/Azure/Azure-Sentinel/tree/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeAPI_ccp#crowdstrike-falcon--api-data-connector-ccp-framework) for full details."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\nLog in to your CrowdStrike Console and navigate to the API section to copy your Base API URL.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve Client Credentials\nObtain your Client ID and Client Secret from the API credentials section in your CrowdStrike account.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api.us-2.crowdstrike.com"", ""type"": ""text"", ""name"": ""apiUrl"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client ID"", ""placeholder"": ""Your Client ID"", ""type"": ""text"", ""name"": ""clientId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client Secret"", ""placeholder"": ""Your Client Secret"", ""type"": ""password"", ""name"": ""clientSecret"", ""validations"": {""required"": true}}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""disconnectLabel"": ""Disconnect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", 
""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeAPI_ccp/CrowdStrikeAPI_Definition.json","true" -"CrowdStrikeIncidents","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeAPICCPDefinition","Microsoft","CrowdStrike API Data Connector (via Codeless Connector Framework)","The [CrowdStrike Data Connector](https://www.crowdstrike.com/) allows ingesting logs from the CrowdStrike API into Microsoft Sentinel. This connector is built on the Microsoft Sentinel Codeless Connector Platform and uses the CrowdStrike API to fetch logs for Alerts, Detections, Hosts, Incidents, and Vulnerabilities. It supports DCR-based ingestion time transformations so that queries can run more efficiently.","[{""title"": ""Configuration steps for the CrowdStrike API"", ""description"": ""Follow the instructions below to obtain your CrowdStrike API credentials. Click [here](https://github.com/Azure/Azure-Sentinel/tree/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeAPI_ccp#crowdstrike-falcon--api-data-connector-ccp-framework) for full details."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\nLog in to your CrowdStrike Console and navigate to the API section to copy your Base API URL.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Retrieve Client Credentials\nObtain your Client ID and Client Secret from the API credentials section in your CrowdStrike account.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api.us-2.crowdstrike.com"", ""type"": ""text"", ""name"": ""apiUrl"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client ID"", ""placeholder"": ""Your Client ID"", ""type"": ""text"", ""name"": ""clientId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client Secret"", ""placeholder"": ""Your Client Secret"", ""type"": ""password"", ""name"": ""clientSecret"", ""validations"": {""required"": true}}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""disconnectLabel"": ""Disconnect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeAPI_ccp/CrowdStrikeAPI_Definition.json","true" -"CrowdStrikeVulnerabilities","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeAPICCPDefinition","Microsoft","CrowdStrike API Data Connector (via Codeless Connector Framework)","The [CrowdStrike Data Connector](https://www.crowdstrike.com/) allows ingesting logs from the CrowdStrike API into Microsoft Sentinel. 
This connector is built on the Microsoft Sentinel Codeless Connector Platform and uses the CrowdStrike API to fetch logs for Alerts, Detections, Hosts, Incidents, and Vulnerabilities. It supports DCR-based ingestion time transformations so that queries can run more efficiently.","[{""title"": ""Configuration steps for the CrowdStrike API"", ""description"": ""Follow the instructions below to obtain your CrowdStrike API credentials. Click [here](https://github.com/Azure/Azure-Sentinel/tree/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeAPI_ccp#crowdstrike-falcon--api-data-connector-ccp-framework) for full details."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\nLog in to your CrowdStrike Console and navigate to the API section to copy your Base API URL.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve Client Credentials\nObtain your Client ID and Client Secret from the API credentials section in your CrowdStrike account.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api.us-2.crowdstrike.com"", ""type"": ""text"", ""name"": ""apiUrl"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client ID"", ""placeholder"": ""Your Client ID"", ""type"": ""text"", ""name"": ""clientId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client Secret"", ""placeholder"": ""Your Client Secret"", ""type"": ""password"", ""name"": ""clientSecret"", ""validations"": {""required"": true}}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""disconnectLabel"": ""Disconnect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", 
""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeAPI_ccp/CrowdStrikeAPI_Definition.json","true" -"ThreatIntelligenceIndicator","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconAdversaryIntelligence","CrowdStrike","CrowdStrike Falcon Adversary Intelligence ","The [CrowdStrike](https://www.crowdstrike.com/) Falcon Indicators of Compromise connector retrieves the Indicators of Compromise from the Falcon Intel API and uploads them to [Microsoft Sentinel Threat Intel](https://learn.microsoft.com/en-us/azure/sentinel/understand-threat-intelligence).","[{""title"": """", ""description"": ""**STEP 1 - [Generate CrowdStrike API credentials](https://www.crowdstrike.com/blog/tech-center/get-access-falcon-apis/).**\n""}, {""title"": """", ""description"": ""Make sure 'Indicators (Falcon Intelligence)' scope has 'read' selected""}, {""title"": """", ""description"": ""**STEP 2 - [Register an Entra App](https://learn.microsoft.com/en-us/entra/identity-platform/quickstart-register-app) with client secret.**\n""}, {""title"": """", ""description"": ""Provide the Entra App principal with 'Microsoft Sentinel Contributor' role assignment on the respective log analytics workspace. 
[How to assign roles on Azure](https://learn.microsoft.com/en-us/azure/role-based-access-control/role-assignments-portal).""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the CrowdStrike Falcon Indicator of Compromise connector, have the Workspace ID (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the CrowdStrike Falcon Adversary Intelligence connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdStrikeFalconAdversaryIntelligence-azuredeploy)\n2. Provide the following parameters: CrowdStrikeClientId, CrowdStrikeClientSecret, CrowdStrikeBaseUrl, WorkspaceId, TenantId, Indicators, AadClientId, AadClientSecret, LookBackDays""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the CrowdStrike Falcon Adversary Intelligence connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdStrikeFalconAdversaryIntelligence-Functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. 
Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CrowdStrikeFalconIOCXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tCROWDSTRIKE_CLIENT_ID\n\t\tCROWDSTRIKE_CLIENT_SECRET\n\t\tCROWDSTRIKE_BASE_URL\n\t\tTENANT_ID\n\t\tINDICATORS\n\t\tWorkspaceKey\n\t\tAAD_CLIENT_ID\n\t\tAAD_CLIENT_SECRET \n\t\tLOOK_BACK_DAYS \n\t\tWORKSPACE_ID \n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""CrowdStrike API Client ID and Client Secret"", ""description"": ""**CROWDSTRIKE_CLIENT_ID**, **CROWDSTRIKE_CLIENT_SECRET**, **CROWDSTRIKE_BASE_URL**. 
CrowdStrike credentials must have Indicators (Falcon Intelligence) read scope.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeFalconAdversaryIntelligence/CrowdStrikeFalconAdversaryIntelligence_FunctionApp.json","true" -"CommonSecurityLog","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconEndpointProtection","CrowdStrike","[Deprecated] CrowdStrike Falcon Endpoint Protection via Legacy Agent","The [CrowdStrike Falcon Endpoint Protection](https://www.crowdstrike.com/endpoint-security-products/) connector allows you to easily connect your CrowdStrike Falcon Event Stream with Microsoft Sentinel, to create custom dashboards, alerts, and improve investigation. This gives you more insight into your organization's endpoints and improves your security operation capabilities.

NOTE: This data connector has been deprecated; consider moving to the CCP data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Crowd Strike Falcon Endpoint Protection and load the function code or click [here](https://aka.ms/sentinel-crowdstrikefalconendpointprotection-parser), on the second line of the query, enter the hostname(s) of your CrowdStrikeFalcon device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. 
You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward CrowdStrike Falcon Event Stream logs to a Syslog agent"", ""description"": ""Deploy the CrowdStrike Falcon SIEM Collector to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent.\n1. [Follow these instructions](https://www.crowdstrike.com/blog/tech-center/integrate-with-your-siem/) to deploy the SIEM Collector and forward syslog\n2. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/Connector_Syslog_CrowdStrikeFalconEndpointProtection.json","true" -"CommonSecurityLog","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconEndpointProtectionAma","CrowdStrike","[Deprecated] CrowdStrike Falcon Endpoint Protection via AMA","The [CrowdStrike Falcon Endpoint Protection](https://www.crowdstrike.com/endpoint-security-products/) connector allows you to easily connect your CrowdStrike Falcon Event Stream with Microsoft Sentinel, to create custom dashboards, alerts, and improve investigation. This gives you more insight into your organization's endpoints and improves your security operation capabilities.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Crowd Strike Falcon Endpoint Protection and load the function code or click [here](https://aka.ms/sentinel-crowdstrikefalconendpointprotection-parser), on the second line of the query, enter the hostname(s) of your CrowdStrikeFalcon device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward CrowdStrike Falcon Event Stream logs to a Syslog agent"", ""description"": ""Deploy the CrowdStrike Falcon SIEM Collector to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent.\n1. 
[Follow these instructions](https://www.crowdstrike.com/blog/tech-center/integrate-with-your-siem/) to deploy the SIEM Collector and forward syslog\n2. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address."", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/template_CrowdStrikeFalconEndpointProtectionAma.json","true" -"CrowdStrike_Additional_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event datainto Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike - managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Requirements: \n In order to use the Falcon Data Replicator feature the following are required: \n 1. **Subscription:** \n 1.1. Falcon Data Replicator. \n 1.2. Falcon Insight XDR. \n 2. **Roles:** \n 2.1. Falcon Administrator.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Setup your CrowdStrike & AWS environments \n To configure access on AWS, use the following two templates provided to set up the AWS environment. This will enable sending logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018Specify template\u2019 option, then \u2018Upload a template file\u2019 by clicking on \u2018Choose file\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018Choose file\u2019 and select the downloaded template. \n 3. Click 'Next' and 'Create stack'.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Make sure that your bucket will be created in the same AWS region as your Falcon CID where the FDR feed is provisioned. 
\n | CrowdStrike region | AWS region | \n |-----------------|-----------|\n | US-1 | us-west-1 |\n | US-2 | us-west-2 | \n | EU-1 | eu-central-1 ""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS CrowdStrike resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CrowdStrike""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Using your own S3 Bucket \n In order to use your own S3 bucket you can refernace the following guide [Use your own S3 bucket](https://falcon.us-2.crowdstrike.com/documentation/page/fa572b1c/falcon-data-replicator#g4f79236) or follow this steps: \n 1. Create support case with the following Name: **Using Self S3 bucket for FDR** \n 2. Add the following information: \n 2.1. The Falcon CID where your FDR feed is provisioned \n 2.2. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.3. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.4. Do not use any partitions. ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Primary Events | data/ |\n | Secondary Events | fdrv2/ ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CrowdstrikeStream"", ""text"": ""Primary Events""}, {""key"": ""Custom-CrowdStrikeSecondary"", ""text"": ""Secondary Events""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" 
-"CrowdStrike_Audit_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event datainto Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike - managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Requirements: \n In order to use the Falcon Data Replicator feature the following are required: \n 1. **Subscription:** \n 1.1. Falcon Data Replicator. \n 1.2. Falcon Insight XDR. \n 2. **Roles:** \n 2.1. Falcon Administrator.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Setup your CrowdStrike & AWS environments \n To configure access on AWS, use the following two templates provided to set up the AWS environment. This will enable sending logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018Specify template\u2019 option, then \u2018Upload a template file\u2019 by clicking on \u2018Choose file\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018Choose file\u2019 and select the downloaded template. \n 3. Click 'Next' and 'Create stack'.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Make sure that your bucket will be created in the same AWS region as your Falcon CID where the FDR feed is provisioned. 
\n | CrowdStrike region | AWS region | \n |-----------------|-----------|\n | US-1 | us-west-1 |\n | US-2 | us-west-2 | \n | EU-1 | eu-central-1 ""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS CrowdStrike resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CrowdStrike""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Using your own S3 Bucket \n In order to use your own S3 bucket you can refernace the following guide [Use your own S3 bucket](https://falcon.us-2.crowdstrike.com/documentation/page/fa572b1c/falcon-data-replicator#g4f79236) or follow this steps: \n 1. Create support case with the following Name: **Using Self S3 bucket for FDR** \n 2. Add the following information: \n 2.1. The Falcon CID where your FDR feed is provisioned \n 2.2. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.3. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.4. Do not use any partitions. ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Primary Events | data/ |\n | Secondary Events | fdrv2/ ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CrowdstrikeStream"", ""text"": ""Primary Events""}, {""key"": ""Custom-CrowdStrikeSecondary"", ""text"": ""Secondary Events""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" 
-"CrowdStrike_Auth_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event datainto Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike - managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Requirements: \n In order to use the Falcon Data Replicator feature the following are required: \n 1. **Subscription:** \n 1.1. Falcon Data Replicator. \n 1.2. Falcon Insight XDR. \n 2. **Roles:** \n 2.1. Falcon Administrator.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Setup your CrowdStrike & AWS environments \n To configure access on AWS, use the following two templates provided to set up the AWS environment. This will enable sending logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018Specify template\u2019 option, then \u2018Upload a template file\u2019 by clicking on \u2018Choose file\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018Choose file\u2019 and select the downloaded template. \n 3. Click 'Next' and 'Create stack'.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Make sure that your bucket will be created in the same AWS region as your Falcon CID where the FDR feed is provisioned. 
\n | CrowdStrike region | AWS region | \n |-----------------|-----------|\n | US-1 | us-west-1 |\n | US-2 | us-west-2 | \n | EU-1 | eu-central-1 ""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS CrowdStrike resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CrowdStrike""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Using your own S3 Bucket \n In order to use your own S3 bucket you can refernace the following guide [Use your own S3 bucket](https://falcon.us-2.crowdstrike.com/documentation/page/fa572b1c/falcon-data-replicator#g4f79236) or follow this steps: \n 1. Create support case with the following Name: **Using Self S3 bucket for FDR** \n 2. Add the following information: \n 2.1. The Falcon CID where your FDR feed is provisioned \n 2.2. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.3. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.4. Do not use any partitions. ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Primary Events | data/ |\n | Secondary Events | fdrv2/ ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CrowdstrikeStream"", ""text"": ""Primary Events""}, {""key"": ""Custom-CrowdStrikeSecondary"", ""text"": ""Secondary Events""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" 
-"CrowdStrike_DNS_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event datainto Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike - managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Requirements: \n In order to use the Falcon Data Replicator feature the following are required: \n 1. **Subscription:** \n 1.1. Falcon Data Replicator. \n 1.2. Falcon Insight XDR. \n 2. **Roles:** \n 2.1. Falcon Administrator.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Setup your CrowdStrike & AWS environments \n To configure access on AWS, use the following two templates provided to set up the AWS environment. This will enable sending logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018Specify template\u2019 option, then \u2018Upload a template file\u2019 by clicking on \u2018Choose file\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018Choose file\u2019 and select the downloaded template. \n 3. Click 'Next' and 'Create stack'.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Make sure that your bucket will be created in the same AWS region as your Falcon CID where the FDR feed is provisioned. 
\n | CrowdStrike region | AWS region | \n |-----------------|-----------|\n | US-1 | us-west-1 |\n | US-2 | us-west-2 | \n | EU-1 | eu-central-1 ""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS CrowdStrike resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CrowdStrike""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Using your own S3 Bucket \n In order to use your own S3 bucket you can refernace the following guide [Use your own S3 bucket](https://falcon.us-2.crowdstrike.com/documentation/page/fa572b1c/falcon-data-replicator#g4f79236) or follow this steps: \n 1. Create support case with the following Name: **Using Self S3 bucket for FDR** \n 2. Add the following information: \n 2.1. The Falcon CID where your FDR feed is provisioned \n 2.2. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.3. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.4. Do not use any partitions. ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Primary Events | data/ |\n | Secondary Events | fdrv2/ ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CrowdstrikeStream"", ""text"": ""Primary Events""}, {""key"": ""Custom-CrowdStrikeSecondary"", ""text"": ""Secondary Events""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" 
-"CrowdStrike_File_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event data into Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike - managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Requirements: \n In order to use the Falcon Data Replicator feature the following are required: \n 1. **Subscription:** \n 1.1. Falcon Data Replicator. \n 1.2. Falcon Insight XDR. \n 2. **Roles:** \n 2.1. Falcon Administrator.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Setup your CrowdStrike & AWS environments \n To configure access on AWS, use the following two templates provided to set up the AWS environment. This will enable sending logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018Specify template\u2019 option, then \u2018Upload a template file\u2019 by clicking on \u2018Choose file\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018Choose file\u2019 and select the downloaded template. \n 3. Click 'Next' and 'Create stack'.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Make sure that your bucket will be created in the same AWS region as your Falcon CID where the FDR feed is provisioned. 
\n | CrowdStrike region | AWS region | \n |-----------------|-----------|\n | US-1 | us-west-1 |\n | US-2 | us-west-2 | \n | EU-1 | eu-central-1 ""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS CrowdStrike resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CrowdStrike""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Using your own S3 Bucket \n In order to use your own S3 bucket you can reference the following guide [Use your own S3 bucket](https://falcon.us-2.crowdstrike.com/documentation/page/fa572b1c/falcon-data-replicator#g4f79236) or follow these steps: \n 1. Create support case with the following Name: **Using Self S3 bucket for FDR** \n 2. Add the following information: \n 2.1. The Falcon CID where your FDR feed is provisioned \n 2.2. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.3. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.4. Do not use any partitions. ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Primary Events | data/ |\n | Secondary Events | fdrv2/ ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CrowdstrikeStream"", ""text"": ""Primary Events""}, {""key"": ""Custom-CrowdStrikeSecondary"", ""text"": ""Secondary Events""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" 
-"CrowdStrike_Network_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event data into Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike - managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Requirements: \n In order to use the Falcon Data Replicator feature the following are required: \n 1. **Subscription:** \n 1.1. Falcon Data Replicator. \n 1.2. Falcon Insight XDR. \n 2. **Roles:** \n 2.1. Falcon Administrator.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Setup your CrowdStrike & AWS environments \n To configure access on AWS, use the following two templates provided to set up the AWS environment. This will enable sending logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018Specify template\u2019 option, then \u2018Upload a template file\u2019 by clicking on \u2018Choose file\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018Choose file\u2019 and select the downloaded template. \n 3. Click 'Next' and 'Create stack'.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Make sure that your bucket will be created in the same AWS region as your Falcon CID where the FDR feed is provisioned. 
\n | CrowdStrike region | AWS region | \n |-----------------|-----------|\n | US-1 | us-west-1 |\n | US-2 | us-west-2 | \n | EU-1 | eu-central-1 ""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS CrowdStrike resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CrowdStrike""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Using your own S3 Bucket \n In order to use your own S3 bucket you can reference the following guide [Use your own S3 bucket](https://falcon.us-2.crowdstrike.com/documentation/page/fa572b1c/falcon-data-replicator#g4f79236) or follow these steps: \n 1. Create support case with the following Name: **Using Self S3 bucket for FDR** \n 2. Add the following information: \n 2.1. The Falcon CID where your FDR feed is provisioned \n 2.2. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.3. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.4. Do not use any partitions. ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Primary Events | data/ |\n | Secondary Events | fdrv2/ ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CrowdstrikeStream"", ""text"": ""Primary Events""}, {""key"": ""Custom-CrowdStrikeSecondary"", ""text"": ""Secondary Events""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" 
-"CrowdStrike_Process_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event data into Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike - managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Requirements: \n In order to use the Falcon Data Replicator feature the following are required: \n 1. **Subscription:** \n 1.1. Falcon Data Replicator. \n 1.2. Falcon Insight XDR. \n 2. **Roles:** \n 2.1. Falcon Administrator.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Setup your CrowdStrike & AWS environments \n To configure access on AWS, use the following two templates provided to set up the AWS environment. This will enable sending logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018Specify template\u2019 option, then \u2018Upload a template file\u2019 by clicking on \u2018Choose file\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018Choose file\u2019 and select the downloaded template. \n 3. Click 'Next' and 'Create stack'.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Make sure that your bucket will be created in the same AWS region as your Falcon CID where the FDR feed is provisioned. 
\n | CrowdStrike region | AWS region | \n |-----------------|-----------|\n | US-1 | us-west-1 |\n | US-2 | us-west-2 | \n | EU-1 | eu-central-1 ""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS CrowdStrike resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CrowdStrike""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Using your own S3 Bucket \n In order to use your own S3 bucket you can reference the following guide [Use your own S3 bucket](https://falcon.us-2.crowdstrike.com/documentation/page/fa572b1c/falcon-data-replicator#g4f79236) or follow these steps: \n 1. Create support case with the following Name: **Using Self S3 bucket for FDR** \n 2. Add the following information: \n 2.1. The Falcon CID where your FDR feed is provisioned \n 2.2. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.3. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.4. Do not use any partitions. ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Primary Events | data/ |\n | Secondary Events | fdrv2/ ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CrowdstrikeStream"", ""text"": ""Primary Events""}, {""key"": ""Custom-CrowdStrikeSecondary"", ""text"": ""Secondary Events""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" 
-"CrowdStrike_Registry_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event data into Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike - managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Requirements: \n In order to use the Falcon Data Replicator feature the following are required: \n 1. **Subscription:** \n 1.1. Falcon Data Replicator. \n 1.2. Falcon Insight XDR. \n 2. **Roles:** \n 2.1. Falcon Administrator.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Setup your CrowdStrike & AWS environments \n To configure access on AWS, use the following two templates provided to set up the AWS environment. This will enable sending logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018Specify template\u2019 option, then \u2018Upload a template file\u2019 by clicking on \u2018Choose file\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018Choose file\u2019 and select the downloaded template. \n 3. Click 'Next' and 'Create stack'.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Make sure that your bucket will be created in the same AWS region as your Falcon CID where the FDR feed is provisioned. 
\n | CrowdStrike region | AWS region | \n |-----------------|-----------|\n | US-1 | us-west-1 |\n | US-2 | us-west-2 | \n | EU-1 | eu-central-1 ""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS CrowdStrike resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CrowdStrike""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Using your own S3 Bucket \n In order to use your own S3 bucket you can reference the following guide [Use your own S3 bucket](https://falcon.us-2.crowdstrike.com/documentation/page/fa572b1c/falcon-data-replicator#g4f79236) or follow these steps: \n 1. Create support case with the following Name: **Using Self S3 bucket for FDR** \n 2. Add the following information: \n 2.1. The Falcon CID where your FDR feed is provisioned \n 2.2. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.3. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.4. Do not use any partitions. ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Primary Events | data/ |\n | Secondary Events | fdrv2/ ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CrowdstrikeStream"", ""text"": ""Primary Events""}, {""key"": ""Custom-CrowdStrikeSecondary"", ""text"": ""Secondary Events""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" 
-"CrowdStrike_Secondary_Data_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event data into Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike - managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Requirements: \n In order to use the Falcon Data Replicator feature the following are required: \n 1. **Subscription:** \n 1.1. Falcon Data Replicator. \n 1.2. Falcon Insight XDR. \n 2. **Roles:** \n 2.1. Falcon Administrator.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Setup your CrowdStrike & AWS environments \n To configure access on AWS, use the following two templates provided to set up the AWS environment. This will enable sending logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018Specify template\u2019 option, then \u2018Upload a template file\u2019 by clicking on \u2018Choose file\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018Choose file\u2019 and select the downloaded template. \n 3. Click 'Next' and 'Create stack'.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Make sure that your bucket will be created in the same AWS region as your Falcon CID where the FDR feed is provisioned. 
\n | CrowdStrike region | AWS region | \n |-----------------|-----------|\n | US-1 | us-west-1 |\n | US-2 | us-west-2 | \n | EU-1 | eu-central-1 ""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS CrowdStrike resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CrowdStrike""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Using your own S3 Bucket \n In order to use your own S3 bucket you can reference the following guide [Use your own S3 bucket](https://falcon.us-2.crowdstrike.com/documentation/page/fa572b1c/falcon-data-replicator#g4f79236) or follow these steps: \n 1. Create support case with the following Name: **Using Self S3 bucket for FDR** \n 2. Add the following information: \n 2.1. The Falcon CID where your FDR feed is provisioned \n 2.2. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.3. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.4. Do not use any partitions. ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Primary Events | data/ |\n | Secondary Events | fdrv2/ ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CrowdstrikeStream"", ""text"": ""Primary Events""}, {""key"": ""Custom-CrowdStrikeSecondary"", ""text"": ""Secondary Events""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" 
-"CrowdStrike_User_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event data into Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike - managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Requirements: \n In order to use the Falcon Data Replicator feature the following are required: \n 1. **Subscription:** \n 1.1. Falcon Data Replicator. \n 1.2. Falcon Insight XDR. \n 2. **Roles:** \n 2.1. Falcon Administrator.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Setup your CrowdStrike & AWS environments \n To configure access on AWS, use the following two templates provided to set up the AWS environment. This will enable sending logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018Specify template\u2019 option, then \u2018Upload a template file\u2019 by clicking on \u2018Choose file\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018Choose file\u2019 and select the downloaded template. \n 3. Click 'Next' and 'Create stack'.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Make sure that your bucket will be created in the same AWS region as your Falcon CID where the FDR feed is provisioned. 
\n | CrowdStrike region | AWS region | \n |-----------------|-----------|\n | US-1 | us-west-1 |\n | US-2 | us-west-2 | \n | EU-1 | eu-central-1 ""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS CrowdStrike resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CrowdStrike""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Using your own S3 Bucket \n In order to use your own S3 bucket you can reference the following guide [Use your own S3 bucket](https://falcon.us-2.crowdstrike.com/documentation/page/fa572b1c/falcon-data-replicator#g4f79236) or follow these steps: \n 1. Create support case with the following Name: **Using Self S3 bucket for FDR** \n 2. Add the following information: \n 2.1. The Falcon CID where your FDR feed is provisioned \n 2.2. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.3. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.4. Do not use any partitions. ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Primary Events | data/ |\n | Secondary Events | fdrv2/ ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new collector"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CrowdstrikeStream"", ""text"": ""Primary Events""}, {""key"": ""Custom-CrowdStrikeSecondary"", ""text"": ""Secondary Events""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" 
-"ASimAuthenticationEventLogs_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR.\n\t - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. \n\t - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. \n2. Register AAD application - For DCR to authenticate to ingest data into log analytics, you must use AAD application. \n\t - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. 
\n\t - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page.""}, {""title"": ""Deployment Options"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy DCE, DCR and Custom Tables for data ingestion**\n\n1. 
Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) \n2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment).\n\t - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3).\n\t - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Step 2).""}, {""title"": """", ""description"": ""**2. Deploy a Function App**\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAWS_KEY\n\t\tAWS_SECRET\n\t\tAWS_REGION_NAME\n\t\tQUEUE_URL\n\t\tUSER_SELECTION_REQUIRE_RAW //True if raw data is required\n\t\tUSER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required\n\t\tMAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium\n\t\tMAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here\n\t\tAZURE_TENANT_ID\n\t\tAZURE_CLIENT_ID\n\t\tAZURE_CLIENT_SECRET\n\t\tDCE_INGESTION_ENDPOINT\n\t\tNORMALIZED_DCR_ID\n\t\tRAW_DATA_DCR_ID\n\t\tEVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet\n\t\tREQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet\n\t\tSchedule //Add value as '0 */1 * * * *' to ensure the function runs every minute.\n5. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" -"ASimFileEventLogs_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR.\n\t - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. \n\t - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. \n2. Register AAD application - For DCR to authenticate to ingest data into log analytics, you must use AAD application. \n\t - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. 
\n\t - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page.""}, {""title"": ""Deployment Options"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy DCE, DCR and Custom Tables for data ingestion**\n\n1. 
Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) \n2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment).\n\t - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3).\n\t - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Step 2).""}, {""title"": """", ""description"": ""**2. Deploy a Function App**\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAWS_KEY\n\t\tAWS_SECRET\n\t\tAWS_REGION_NAME\n\t\tQUEUE_URL\n\t\tUSER_SELECTION_REQUIRE_RAW //True if raw data is required\n\t\tUSER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required\n\t\tMAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium\n\t\tMAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here\n\t\tAZURE_TENANT_ID\n\t\tAZURE_CLIENT_ID\n\t\tAZURE_CLIENT_SECRET\n\t\tDCE_INGESTION_ENDPOINT\n\t\tNORMALIZED_DCR_ID\n\t\tRAW_DATA_DCR_ID\n\t\tEVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet\n\t\tREQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet\n\t\tSchedule //Add value as '0 */1 * * * *' to ensure the function runs every minute.\n5. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" -"ASimProcessEventLogs_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR.\n\t - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. \n\t - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. \n2. Register AAD application - For DCR to authenticate to ingest data into log analytics, you must use AAD application. \n\t - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. 
\n\t - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page.""}, {""title"": ""Deployment Options"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy DCE, DCR and Custom Tables for data ingestion**\n\n1. 
Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) \n2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment).\n\t - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3).\n\t - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Step 2).""}, {""title"": """", ""description"": ""**2. Deploy a Function App**\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAWS_KEY\n\t\tAWS_SECRET\n\t\tAWS_REGION_NAME\n\t\tQUEUE_URL\n\t\tUSER_SELECTION_REQUIRE_RAW //True if raw data is required\n\t\tUSER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required\n\t\tMAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium\n\t\tMAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here\n\t\tAZURE_TENANT_ID\n\t\tAZURE_CLIENT_ID\n\t\tAZURE_CLIENT_SECRET\n\t\tDCE_INGESTION_ENDPOINT\n\t\tNORMALIZED_DCR_ID\n\t\tRAW_DATA_DCR_ID\n\t\tEVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet\n\t\tREQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet\n\t\tSchedule //Add value as '0 */1 * * * *' to ensure the function runs every minute.\n5. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" -"ASimRegistryEventLogs_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR.\n\t - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. \n\t - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. \n2. Register AAD application - For DCR to authenticate to ingest data into log analytics, you must use AAD application. \n\t - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. 
\n\t - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page.""}, {""title"": ""Deployment Options"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy DCE, DCR and Custom Tables for data ingestion**\n\n1. 
Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) \n2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment).\n\t - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3).\n\t - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Step 2).""}, {""title"": """", ""description"": ""**2. Deploy a Function App**\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAWS_KEY\n\t\tAWS_SECRET\n\t\tAWS_REGION_NAME\n\t\tQUEUE_URL\n\t\tUSER_SELECTION_REQUIRE_RAW //True if raw data is required\n\t\tUSER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required\n\t\tMAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium\n\t\tMAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here\n\t\tAZURE_TENANT_ID\n\t\tAZURE_CLIENT_ID\n\t\tAZURE_CLIENT_SECRET\n\t\tDCE_INGESTION_ENDPOINT\n\t\tNORMALIZED_DCR_ID\n\t\tRAW_DATA_DCR_ID\n\t\tEVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet\n\t\tREQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet\n\t\tSchedule //Add value as '0 */1 * * * *' to ensure the function runs every minute.\n5. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" -"ASimUserManagementLogs_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR.\n\t - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. \n\t - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. \n2. Register AAD application - For DCR to authenticate to ingest data into log analytics, you must use AAD application. \n\t - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. 
\n\t - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page.""}, {""title"": ""Deployment Options"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy DCE, DCR and Custom Tables for data ingestion**\n\n1. 
Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) \n2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment).\n\t - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3).\n\t - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Step 2).""}, {""title"": """", ""description"": ""**2. Deploy a Function App**\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAWS_KEY\n\t\tAWS_SECRET\n\t\tAWS_REGION_NAME\n\t\tQUEUE_URL\n\t\tUSER_SELECTION_REQUIRE_RAW //True if raw data is required\n\t\tUSER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required\n\t\tMAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium\n\t\tMAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here\n\t\tAZURE_TENANT_ID\n\t\tAZURE_CLIENT_ID\n\t\tAZURE_CLIENT_SECRET\n\t\tDCE_INGESTION_ENDPOINT\n\t\tNORMALIZED_DCR_ID\n\t\tRAW_DATA_DCR_ID\n\t\tEVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet\n\t\tREQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet\n\t\tSchedule //Add value as '0 */1 * * * *' to ensure the function runs every minute.\n5. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" -"CrowdStrike_Additional_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR.\n\t - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. \n\t - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. \n2. Register AAD application - For DCR to authenticate to ingest data into log analytics, you must use AAD application. \n\t - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. 
\n\t - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page.""}, {""title"": ""Deployment Options"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy DCE, DCR and Custom Tables for data ingestion**\n\n1. 
Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) \n2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment).\n\t - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3).\n\t - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Step 2).""}, {""title"": """", ""description"": ""**2. Deploy a Function App**\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAWS_KEY\n\t\tAWS_SECRET\n\t\tAWS_REGION_NAME\n\t\tQUEUE_URL\n\t\tUSER_SELECTION_REQUIRE_RAW //True if raw data is required\n\t\tUSER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required\n\t\tMAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium\n\t\tMAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here\n\t\tAZURE_TENANT_ID\n\t\tAZURE_CLIENT_ID\n\t\tAZURE_CLIENT_SECRET\n\t\tDCE_INGESTION_ENDPOINT\n\t\tNORMALIZED_DCR_ID\n\t\tRAW_DATA_DCR_ID\n\t\tEVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet\n\t\tREQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet\n\t\tSchedule //Add value as '0 */1 * * * *' to ensure the function runs every minute.\n5. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" -"CrowdStrike_Secondary_Data_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR.\n\t - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. \n\t - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. \n2. Register AAD application - For DCR to authenticate to ingest data into log analytics, you must use AAD application. \n\t - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. 
\n\t - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page.""}, {""title"": ""Deployment Options"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy DCE, DCR and Custom Tables for data ingestion**\n\n1. 
Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) \n2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment).\n\t - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3).\n\t - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Step 2).""}, {""title"": """", ""description"": ""**2. Deploy a Function App**\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAWS_KEY\n\t\tAWS_SECRET\n\t\tAWS_REGION_NAME\n\t\tQUEUE_URL\n\t\tUSER_SELECTION_REQUIRE_RAW //True if raw data is required\n\t\tUSER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required\n\t\tMAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium\n\t\tMAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here\n\t\tAZURE_TENANT_ID\n\t\tAZURE_CLIENT_ID\n\t\tAZURE_CLIENT_SECRET\n\t\tDCE_INGESTION_ENDPOINT\n\t\tNORMALIZED_DCR_ID\n\t\tRAW_DATA_DCR_ID\n\t\tEVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet\n\t\tREQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet\n\t\tSchedule //Add value as '0 */1 * * * *' to ensure the function runs every minute.\n5. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" -"","CustomLogsAma","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CustomLogsAma","azuresentinel","azure-sentinel-solution-customlogsviaama","2024-07-21","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"ApacheHTTPServer_CL","CustomLogsAma","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CustomLogsAma","azuresentinel","azure-sentinel-solution-customlogsviaama","2024-07-21","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CustomlogsviaAMA","Microsoft","Custom logs via AMA","Many applications log information to text or JSON files instead of standard logging services, such as Windows Event logs, Syslog or CEF. The Custom Logs data connector allows you to collect events from files on both Windows and Linux computers and stream them to custom logs tables you created. While streaming the data you can parse and transform the contents using the DCR. After collecting the data, you can apply analytic rules, hunting, searching, threat intelligence, enrichments and more.

**NOTE: Use this connector for the following devices:** Cisco Meraki, Zscaler Private Access (ZPA), VMware vCenter, Apache HTTP server, Apache Tomcat, Jboss Enterprise application platform, Juniper IDP, MarkLogic Audit, MongoDB Audit, Nginx HTTP server, Oracle Weblogic server, PostgreSQL Events, Squid Proxy, Ubiquiti UniFi, SecurityBridge Threat detection SAP and AI vectra stream.","[{""description"": ""> Custom logs are collected from both Windows and Linux agents."", ""instructions"": [{""type"": ""CustomLogsAMA"", ""parameters"": {}}, {""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 8}, ""type"": ""InstallAgent""}], ""title"": ""Enable data collection rule""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces/datasources"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace data sources"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true}}], ""customs"": [{""name"": ""Permissions"", ""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","true","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CustomLogsAma/Data%20Connectors/CustomLogsViaAmaTemplate.json","true" -"JBossEvent_CL","CustomLogsAma","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CustomLogsAma","azuresentinel","azure-sentinel-solution-customlogsviaama","2024-07-21","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CustomlogsviaAMA","Microsoft","Custom logs via AMA","Many applications log information to text or JSON files instead of standard logging services, such as Windows Event logs, Syslog or CEF. 
The Custom Logs data connector allows you to collect events from files on both Windows and Linux computers and stream them to custom logs tables you created. While streaming the data you can parse and transform the contents using the DCR. After collecting the data, you can apply analytic rules, hunting, searching, threat intelligence, enrichments and more.

**NOTE: Use this connector for the following devices:** Cisco Meraki, Zscaler Private Access (ZPA), VMware vCenter, Apache HTTP server, Apache Tomcat, Jboss Enterprise application platform, Juniper IDP, MarkLogic Audit, MongoDB Audit, Nginx HTTP server, Oracle Weblogic server, PostgreSQL Events, Squid Proxy, Ubiquiti UniFi, SecurityBridge Threat detection SAP and AI vectra stream.","[{""description"": ""> Custom logs are collected from both Windows and Linux agents."", ""instructions"": [{""type"": ""CustomLogsAMA"", ""parameters"": {}}, {""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 8}, ""type"": ""InstallAgent""}], ""title"": ""Enable data collection rule""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces/datasources"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace data sources"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true}}], ""customs"": [{""name"": ""Permissions"", ""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","true","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CustomLogsAma/Data%20Connectors/CustomLogsViaAmaTemplate.json","true" -"JuniperIDP_CL","CustomLogsAma","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CustomLogsAma","azuresentinel","azure-sentinel-solution-customlogsviaama","2024-07-21","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CustomlogsviaAMA","Microsoft","Custom logs via AMA","Many applications log information to text or JSON files instead of standard logging services, such as Windows Event logs, Syslog or CEF. 
The Custom Logs data connector allows you to collect events from files on both Windows and Linux computers and stream them to custom logs tables you created. While streaming the data you can parse and transform the contents using the DCR. After collecting the data, you can apply analytic rules, hunting, searching, threat intelligence, enrichments and more.

**NOTE: Use this connector for the following devices:** Cisco Meraki, Zscaler Private Access (ZPA), VMware vCenter, Apache HTTP server, Apache Tomcat, Jboss Enterprise application platform, Juniper IDP, MarkLogic Audit, MongoDB Audit, Nginx HTTP server, Oracle Weblogic server, PostgreSQL Events, Squid Proxy, Ubiquiti UniFi, SecurityBridge Threat detection SAP and AI vectra stream.","[{""description"": ""> Custom logs are collected from both Windows and Linux agents."", ""instructions"": [{""type"": ""CustomLogsAMA"", ""parameters"": {}}, {""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 8}, ""type"": ""InstallAgent""}], ""title"": ""Enable data collection rule""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces/datasources"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace data sources"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true}}], ""customs"": [{""name"": ""Permissions"", ""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","true","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CustomLogsAma/Data%20Connectors/CustomLogsViaAmaTemplate.json","true" -"MarkLogicAudit_CL","CustomLogsAma","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CustomLogsAma","azuresentinel","azure-sentinel-solution-customlogsviaama","2024-07-21","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CustomlogsviaAMA","Microsoft","Custom logs via AMA","Many applications log information to text or JSON files instead of standard logging services, such as Windows Event logs, Syslog or CEF. 
The Custom Logs data connector allows you to collect events from files on both Windows and Linux computers and stream them to custom logs tables you created. While streaming the data you can parse and transform the contents using the DCR. After collecting the data, you can apply analytic rules, hunting, searching, threat intelligence, enrichments and more.

**NOTE: Use this connector for the following devices:** Cisco Meraki, Zscaler Private Access (ZPA), VMware vCenter, Apache HTTP server, Apache Tomcat, Jboss Enterprise application platform, Juniper IDP, MarkLogic Audit, MongoDB Audit, Nginx HTTP server, Oracle Weblogic server, PostgreSQL Events, Squid Proxy, Ubiquiti UniFi, SecurityBridge Threat detection SAP and AI vectra stream.","[{""description"": ""> Custom logs are collected from both Windows and Linux agents."", ""instructions"": [{""type"": ""CustomLogsAMA"", ""parameters"": {}}, {""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 8}, ""type"": ""InstallAgent""}], ""title"": ""Enable data collection rule""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces/datasources"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace data sources"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true}}], ""customs"": [{""name"": ""Permissions"", ""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","true","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CustomLogsAma/Data%20Connectors/CustomLogsViaAmaTemplate.json","true" -"MongoDBAudit_CL","CustomLogsAma","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CustomLogsAma","azuresentinel","azure-sentinel-solution-customlogsviaama","2024-07-21","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CustomlogsviaAMA","Microsoft","Custom logs via AMA","Many applications log information to text or JSON files instead of standard logging services, such as Windows Event logs, Syslog or CEF. 
The Custom Logs data connector allows you to collect events from files on both Windows and Linux computers and stream them to custom logs tables you created. While streaming the data you can parse and transform the contents using the DCR. After collecting the data, you can apply analytic rules, hunting, searching, threat intelligence, enrichments and more.

**NOTE: Use this connector for the following devices:** Cisco Meraki, Zscaler Private Access (ZPA), VMware vCenter, Apache HTTP server, Apache Tomcat, Jboss Enterprise application platform, Juniper IDP, MarkLogic Audit, MongoDB Audit, Nginx HTTP server, Oracle Weblogic server, PostgreSQL Events, Squid Proxy, Ubiquiti UniFi, SecurityBridge Threat detection SAP and AI vectra stream.","[{""description"": ""> Custom logs are collected from both Windows and Linux agents."", ""instructions"": [{""type"": ""CustomLogsAMA"", ""parameters"": {}}, {""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 8}, ""type"": ""InstallAgent""}], ""title"": ""Enable data collection rule""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces/datasources"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace data sources"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true}}], ""customs"": [{""name"": ""Permissions"", ""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","true","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CustomLogsAma/Data%20Connectors/CustomLogsViaAmaTemplate.json","true" -"NGINX_CL","CustomLogsAma","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CustomLogsAma","azuresentinel","azure-sentinel-solution-customlogsviaama","2024-07-21","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CustomlogsviaAMA","Microsoft","Custom logs via AMA","Many applications log information to text or JSON files instead of standard logging services, such as Windows Event logs, Syslog or CEF. 
The Custom Logs data connector allows you to collect events from files on both Windows and Linux computers and stream them to custom logs tables you created. While streaming the data you can parse and transform the contents using the DCR. After collecting the data, you can apply analytic rules, hunting, searching, threat intelligence, enrichments and more.

**NOTE: Use this connector for the following devices:** Cisco Meraki, Zscaler Private Access (ZPA), VMware vCenter, Apache HTTP server, Apache Tomcat, Jboss Enterprise application platform, Juniper IDP, MarkLogic Audit, MongoDB Audit, Nginx HTTP server, Oracle Weblogic server, PostgreSQL Events, Squid Proxy, Ubiquiti UniFi, SecurityBridge Threat detection SAP and AI vectra stream.","[{""description"": ""> Custom logs are collected from both Windows and Linux agents."", ""instructions"": [{""type"": ""CustomLogsAMA"", ""parameters"": {}}, {""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 8}, ""type"": ""InstallAgent""}], ""title"": ""Enable data collection rule""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces/datasources"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace data sources"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true}}], ""customs"": [{""name"": ""Permissions"", ""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","true","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CustomLogsAma/Data%20Connectors/CustomLogsViaAmaTemplate.json","true" -"OracleWebLogicServer_CL","CustomLogsAma","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CustomLogsAma","azuresentinel","azure-sentinel-solution-customlogsviaama","2024-07-21","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CustomlogsviaAMA","Microsoft","Custom logs via AMA","Many applications log information to text or JSON files instead of standard logging services, such as Windows Event logs, Syslog or CEF. 
The Custom Logs data connector allows you to collect events from files on both Windows and Linux computers and stream them to custom logs tables you created. While streaming the data you can parse and transform the contents using the DCR. After collecting the data, you can apply analytic rules, hunting, searching, threat intelligence, enrichments and more.

**NOTE: Use this connector for the following devices:** Cisco Meraki, Zscaler Private Access (ZPA), VMware vCenter, Apache HTTP server, Apache Tomcat, Jboss Enterprise application platform, Juniper IDP, MarkLogic Audit, MongoDB Audit, Nginx HTTP server, Oracle Weblogic server, PostgreSQL Events, Squid Proxy, Ubiquiti UniFi, SecurityBridge Threat detection SAP and AI vectra stream.","[{""description"": ""> Custom logs are collected from both Windows and Linux agents."", ""instructions"": [{""type"": ""CustomLogsAMA"", ""parameters"": {}}, {""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 8}, ""type"": ""InstallAgent""}], ""title"": ""Enable data collection rule""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces/datasources"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace data sources"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true}}], ""customs"": [{""name"": ""Permissions"", ""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","true","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CustomLogsAma/Data%20Connectors/CustomLogsViaAmaTemplate.json","true" -"PostgreSQL_CL","CustomLogsAma","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CustomLogsAma","azuresentinel","azure-sentinel-solution-customlogsviaama","2024-07-21","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CustomlogsviaAMA","Microsoft","Custom logs via AMA","Many applications log information to text or JSON files instead of standard logging services, such as Windows Event logs, Syslog or CEF. 
The Custom Logs data connector allows you to collect events from files on both Windows and Linux computers and stream them to custom logs tables you created. While streaming the data you can parse and transform the contents using the DCR. After collecting the data, you can apply analytic rules, hunting, searching, threat intelligence, enrichments and more.

**NOTE: Use this connector for the following devices:** Cisco Meraki, Zscaler Private Access (ZPA), VMware vCenter, Apache HTTP server, Apache Tomcat, Jboss Enterprise application platform, Juniper IDP, MarkLogic Audit, MongoDB Audit, Nginx HTTP server, Oracle Weblogic server, PostgreSQL Events, Squid Proxy, Ubiquiti UniFi, SecurityBridge Threat detection SAP and AI vectra stream.","[{""description"": ""> Custom logs are collected from both Windows and Linux agents."", ""instructions"": [{""type"": ""CustomLogsAMA"", ""parameters"": {}}, {""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 8}, ""type"": ""InstallAgent""}], ""title"": ""Enable data collection rule""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces/datasources"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace data sources"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true}}], ""customs"": [{""name"": ""Permissions"", ""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","true","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CustomLogsAma/Data%20Connectors/CustomLogsViaAmaTemplate.json","true" -"SecurityBridgeLogs_CL","CustomLogsAma","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CustomLogsAma","azuresentinel","azure-sentinel-solution-customlogsviaama","2024-07-21","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CustomlogsviaAMA","Microsoft","Custom logs via AMA","Many applications log information to text or JSON files instead of standard logging services, such as Windows Event logs, Syslog or CEF. 
The Custom Logs data connector allows you to collect events from files on both Windows and Linux computers and stream them to custom logs tables you created. While streaming the data you can parse and transform the contents using the DCR. After collecting the data, you can apply analytic rules, hunting, searching, threat intelligence, enrichments and more.

**NOTE: Use this connector for the following devices:** Cisco Meraki, Zscaler Private Access (ZPA), VMware vCenter, Apache HTTP server, Apache Tomcat, Jboss Enterprise application platform, Juniper IDP, MarkLogic Audit, MongoDB Audit, Nginx HTTP server, Oracle Weblogic server, PostgreSQL Events, Squid Proxy, Ubiquiti UniFi, SecurityBridge Threat detection SAP and AI vectra stream.","[{""description"": ""> Custom logs are collected from both Windows and Linux agents."", ""instructions"": [{""type"": ""CustomLogsAMA"", ""parameters"": {}}, {""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 8}, ""type"": ""InstallAgent""}], ""title"": ""Enable data collection rule""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces/datasources"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace data sources"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true}}], ""customs"": [{""name"": ""Permissions"", ""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","true","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CustomLogsAma/Data%20Connectors/CustomLogsViaAmaTemplate.json","true" -"SquidProxy_CL","CustomLogsAma","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CustomLogsAma","azuresentinel","azure-sentinel-solution-customlogsviaama","2024-07-21","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CustomlogsviaAMA","Microsoft","Custom logs via AMA","Many applications log information to text or JSON files instead of standard logging services, such as Windows Event logs, Syslog or CEF. 
The Custom Logs data connector allows you to collect events from files on both Windows and Linux computers and stream them to custom logs tables you created. While streaming the data you can parse and transform the contents using the DCR. After collecting the data, you can apply analytic rules, hunting, searching, threat intelligence, enrichments and more.

**NOTE: Use this connector for the following devices:** Cisco Meraki, Zscaler Private Access (ZPA), VMware vCenter, Apache HTTP server, Apache Tomcat, Jboss Enterprise application platform, Juniper IDP, MarkLogic Audit, MongoDB Audit, Nginx HTTP server, Oracle Weblogic server, PostgreSQL Events, Squid Proxy, Ubiquiti UniFi, SecurityBridge Threat detection SAP and AI vectra stream.","[{""description"": ""> Custom logs are collected from both Windows and Linux agents."", ""instructions"": [{""type"": ""CustomLogsAMA"", ""parameters"": {}}, {""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 8}, ""type"": ""InstallAgent""}], ""title"": ""Enable data collection rule""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces/datasources"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace data sources"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true}}], ""customs"": [{""name"": ""Permissions"", ""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","true","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CustomLogsAma/Data%20Connectors/CustomLogsViaAmaTemplate.json","true" -"Tomcat_CL","CustomLogsAma","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CustomLogsAma","azuresentinel","azure-sentinel-solution-customlogsviaama","2024-07-21","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CustomlogsviaAMA","Microsoft","Custom logs via AMA","Many applications log information to text or JSON files instead of standard logging services, such as Windows Event logs, Syslog or CEF. 
The Custom Logs data connector allows you to collect events from files on both Windows and Linux computers and stream them to custom logs tables you created. While streaming the data you can parse and transform the contents using the DCR. After collecting the data, you can apply analytic rules, hunting, searching, threat intelligence, enrichments and more.

**NOTE: Use this connector for the following devices:** Cisco Meraki, Zscaler Private Access (ZPA), VMware vCenter, Apache HTTP server, Apache Tomcat, Jboss Enterprise application platform, Juniper IDP, MarkLogic Audit, MongoDB Audit, Nginx HTTP server, Oracle Weblogic server, PostgreSQL Events, Squid Proxy, Ubiquiti UniFi, SecurityBridge Threat detection SAP and AI vectra stream.","[{""description"": ""> Custom logs are collected from both Windows and Linux agents."", ""instructions"": [{""type"": ""CustomLogsAMA"", ""parameters"": {}}, {""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 8}, ""type"": ""InstallAgent""}], ""title"": ""Enable data collection rule""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces/datasources"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace data sources"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true}}], ""customs"": [{""name"": ""Permissions"", ""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","true","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CustomLogsAma/Data%20Connectors/CustomLogsViaAmaTemplate.json","true" -"Ubiquiti_CL","CustomLogsAma","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CustomLogsAma","azuresentinel","azure-sentinel-solution-customlogsviaama","2024-07-21","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CustomlogsviaAMA","Microsoft","Custom logs via AMA","Many applications log information to text or JSON files instead of standard logging services, such as Windows Event logs, Syslog or CEF. 
The Custom Logs data connector allows you to collect events from files on both Windows and Linux computers and stream them to custom logs tables you created. While streaming the data you can parse and transform the contents using the DCR. After collecting the data, you can apply analytic rules, hunting, searching, threat intelligence, enrichments and more.

**NOTE: Use this connector for the following devices:** Cisco Meraki, Zscaler Private Access (ZPA), VMware vCenter, Apache HTTP server, Apache Tomcat, Jboss Enterprise application platform, Juniper IDP, MarkLogic Audit, MongoDB Audit, Nginx HTTP server, Oracle Weblogic server, PostgreSQL Events, Squid Proxy, Ubiquiti UniFi, SecurityBridge Threat detection SAP and AI vectra stream.","[{""description"": ""> Custom logs are collected from both Windows and Linux agents."", ""instructions"": [{""type"": ""CustomLogsAMA"", ""parameters"": {}}, {""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 8}, ""type"": ""InstallAgent""}], ""title"": ""Enable data collection rule""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces/datasources"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace data sources"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true}}], ""customs"": [{""name"": ""Permissions"", ""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","true","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CustomLogsAma/Data%20Connectors/CustomLogsViaAmaTemplate.json","true" -"VectraStream_CL","CustomLogsAma","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CustomLogsAma","azuresentinel","azure-sentinel-solution-customlogsviaama","2024-07-21","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CustomlogsviaAMA","Microsoft","Custom logs via AMA","Many applications log information to text or JSON files instead of standard logging services, such as Windows Event logs, Syslog or CEF. 
The Custom Logs data connector allows you to collect events from files on both Windows and Linux computers and stream them to custom logs tables you created. While streaming the data you can parse and transform the contents using the DCR. After collecting the data, you can apply analytic rules, hunting, searching, threat intelligence, enrichments and more.

**NOTE: Use this connector for the following devices:** Cisco Meraki, Zscaler Private Access (ZPA), VMware vCenter, Apache HTTP server, Apache Tomcat, Jboss Enterprise application platform, Juniper IDP, MarkLogic Audit, MongoDB Audit, Nginx HTTP server, Oracle Weblogic server, PostgreSQL Events, Squid Proxy, Ubiquiti UniFi, SecurityBridge Threat detection SAP and AI vectra stream.","[{""description"": ""> Custom logs are collected from both Windows and Linux agents."", ""instructions"": [{""type"": ""CustomLogsAMA"", ""parameters"": {}}, {""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 8}, ""type"": ""InstallAgent""}], ""title"": ""Enable data collection rule""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces/datasources"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace data sources"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true}}], ""customs"": [{""name"": ""Permissions"", ""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","true","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CustomLogsAma/Data%20Connectors/CustomLogsViaAmaTemplate.json","true" -"ZPA_CL","CustomLogsAma","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CustomLogsAma","azuresentinel","azure-sentinel-solution-customlogsviaama","2024-07-21","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CustomlogsviaAMA","Microsoft","Custom logs via AMA","Many applications log information to text or JSON files instead of standard logging services, such as Windows Event logs, Syslog or CEF. 
The Custom Logs data connector allows you to collect events from files on both Windows and Linux computers and stream them to custom logs tables you created. While streaming the data you can parse and transform the contents using the DCR. After collecting the data, you can apply analytic rules, hunting, searching, threat intelligence, enrichments and more.

**NOTE: Use this connector for the following devices:** Cisco Meraki, Zscaler Private Access (ZPA), VMware vCenter, Apache HTTP server, Apache Tomcat, Jboss Enterprise application platform, Juniper IDP, MarkLogic Audit, MongoDB Audit, Nginx HTTP server, Oracle Weblogic server, PostgreSQL Events, Squid Proxy, Ubiquiti UniFi, SecurityBridge Threat detection SAP and AI vectra stream.","[{""description"": ""> Custom logs are collected from both Windows and Linux agents."", ""instructions"": [{""type"": ""CustomLogsAMA"", ""parameters"": {}}, {""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 8}, ""type"": ""InstallAgent""}], ""title"": ""Enable data collection rule""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces/datasources"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace data sources"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true}}], ""customs"": [{""name"": ""Permissions"", ""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","true","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CustomLogsAma/Data%20Connectors/CustomLogsViaAmaTemplate.json","true" -"meraki_CL","CustomLogsAma","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CustomLogsAma","azuresentinel","azure-sentinel-solution-customlogsviaama","2024-07-21","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CustomlogsviaAMA","Microsoft","Custom logs via AMA","Many applications log information to text or JSON files instead of standard logging services, such as Windows Event logs, Syslog or CEF. 
The Custom Logs data connector allows you to collect events from files on both Windows and Linux computers and stream them to custom logs tables you created. While streaming the data you can parse and transform the contents using the DCR. After collecting the data, you can apply analytic rules, hunting, searching, threat intelligence, enrichments and more.

**NOTE: Use this connector for the following devices:** Cisco Meraki, Zscaler Private Access (ZPA), VMware vCenter, Apache HTTP server, Apache Tomcat, Jboss Enterprise application platform, Juniper IDP, MarkLogic Audit, MongoDB Audit, Nginx HTTP server, Oracle Weblogic server, PostgreSQL Events, Squid Proxy, Ubiquiti UniFi, SecurityBridge Threat detection SAP and AI vectra stream.","[{""description"": ""> Custom logs are collected from both Windows and Linux agents."", ""instructions"": [{""type"": ""CustomLogsAMA"", ""parameters"": {}}, {""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 8}, ""type"": ""InstallAgent""}], ""title"": ""Enable data collection rule""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces/datasources"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace data sources"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true}}], ""customs"": [{""name"": ""Permissions"", ""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","true","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CustomLogsAma/Data%20Connectors/CustomLogsViaAmaTemplate.json","true" -"vcenter_CL","CustomLogsAma","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CustomLogsAma","azuresentinel","azure-sentinel-solution-customlogsviaama","2024-07-21","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CustomlogsviaAMA","Microsoft","Custom logs via AMA","Many applications log information to text or JSON files instead of standard logging services, such as Windows Event logs, Syslog or CEF. 
The Custom Logs data connector allows you to collect events from files on both Windows and Linux computers and stream them to custom logs tables you created. While streaming the data you can parse and transform the contents using the DCR. After collecting the data, you can apply analytic rules, hunting, searching, threat intelligence, enrichments and more.

**NOTE: Use this connector for the following devices:** Cisco Meraki, Zscaler Private Access (ZPA), VMware vCenter, Apache HTTP server, Apache Tomcat, Jboss Enterprise application platform, Juniper IDP, MarkLogic Audit, MongoDB Audit, Nginx HTTP server, Oracle Weblogic server, PostgreSQL Events, Squid Proxy, Ubiquiti UniFi, SecurityBridge Threat detection SAP and AI vectra stream.","[{""description"": ""> Custom logs are collected from both Windows and Linux agents."", ""instructions"": [{""type"": ""CustomLogsAMA"", ""parameters"": {}}, {""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 8}, ""type"": ""InstallAgent""}], ""title"": ""Enable data collection rule""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces/datasources"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace data sources"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true}}], ""customs"": [{""name"": ""Permissions"", ""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","true","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CustomLogsAma/Data%20Connectors/CustomLogsViaAmaTemplate.json","true" -"","CyberArk Enterprise Password Vault (EPV) Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyberArk%20Enterprise%20Password%20Vault%20%28EPV%29%20Events","cyberark","cyberark_epv_events_mss","2022-05-02","","","Cyberark","Partner","https://www.cyberark.com/services-support/technical-support/","","domains","","","","","","","false","","false" -"CommonSecurityLog","CyberArk Enterprise Password Vault (EPV) Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyberArk%20Enterprise%20Password%20Vault%20%28EPV%29%20Events","cyberark","cyberark_epv_events_mss","2022-05-02","","","Cyberark","Partner","https://www.cyberark.com/services-support/technical-support/","","domains","CyberArk","Cyber-Ark","[Deprecated] CyberArk Enterprise Password Vault (EPV) Events via Legacy Agent","CyberArk Enterprise Password Vault generates an xml Syslog message for every action taken against the Vault. The EPV will send the xml messages through the Microsoft Sentinel.xsl translator to be converted into CEF standard format and sent to a syslog staging server of your choice (syslog-ng, rsyslog). The Log Analytics agent installed on your syslog staging server will import the messages into Microsoft Log Analytics. Refer to the [CyberArk documentation](https://docs.cyberark.com/Product-Doc/OnlineHelp/PAS/Latest/en/Content/PASIMP/DV-Integrating-with-SIEM-Applications.htm) for more guidance on SIEM integrations.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel; this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python installed on your machine.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""On the EPV configure the dbparm.ini to send Syslog messages in CEF format to the proxy machine. Make sure to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python installed on your machine using the following command: python --version\n\n>\n\n> 2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyberArk%20Enterprise%20Password%20Vault%20%28EPV%29%20Events/Data%20Connectors/CyberArk%20Data%20Connector.json","true" -"CommonSecurityLog","CyberArk Enterprise Password Vault (EPV) Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyberArk%20Enterprise%20Password%20Vault%20%28EPV%29%20Events","cyberark","cyberark_epv_events_mss","2022-05-02","","","Cyberark","Partner","https://www.cyberark.com/services-support/technical-support/","","domains","CyberArkAma","Cyber-Ark","[Deprecated] CyberArk Privilege Access Manager (PAM) Events via AMA","CyberArk Privilege Access Manager generates an xml Syslog message for every action taken against the Vault. The PAM will send the xml messages through the Microsoft Sentinel.xsl translator to be converted into CEF standard format and sent to a syslog staging server of your choice (syslog-ng, rsyslog). The Log Analytics agent installed on your syslog staging server will import the messages into Microsoft Log Analytics. Refer to the [CyberArk documentation](https://docs.cyberark.com/privilege-cloud-standard/Latest/en/Content/Privilege%20Cloud/privCloud-connect-siem.htm) for more guidance on SIEM integrations.","[{""title"": """", ""description"": """", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. 
Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check if there is no existing DCR configured to collect required facility of logs, create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""On the EPV configure the dbparm.ini to send Syslog messages in CEF format to the proxy machine. Make sure to send the logs to port 514 TCP on the machine's IP address."", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python installed on your machine using the following command: python --version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyberArk%20Enterprise%20Password%20Vault%20%28EPV%29%20Events/Data%20Connectors/template_CyberArkAMA.json","true" -"","CyberArkAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyberArkAudit","cyberark","cyberark_audit_sentinel","2024-03-01","","","CyberArk Support","Partner","https://www.cyberark.com/services-support/technical-support-contact/","","domains","","","","","","","false","","false" -"CyberArkAudit","CyberArkAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyberArkAudit","cyberark","cyberark_audit_sentinel","2024-03-01","","","CyberArk Support","Partner","https://www.cyberark.com/services-support/technical-support-contact/","","domains","CyberArkAudit","CyberArk","CyberArkAudit","The [CyberArk Audit](https://docs.cyberark.com/Audit/Latest/en/Content/Resources/_TopNav/cc_Home.htm) data connector provides the capability to retrieve security event logs of the CyberArk Audit service and more events into Microsoft Sentinel through the REST API. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Azure Blob Storage API to pull logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.""}, {""title"": """", ""description"": "">**NOTE:** API authorization key(s) or token(s) are securely stored in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the CyberArk Audit SIEM Integration**\n\n Follow the [instructions](https://docs.cyberark.com/audit/latest/en/Content/Audit/isp_Microsoft_Sentinel.htm?tocpath=SIEM%20integrations%7C_____3) to obtain connection details and credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the CyberArk Audit data connector, have the Workspace Name and Workspace Location (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""WorkspaceLocation""], ""label"": ""Workspace Location""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the CyberArk Audit data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CyberArkAuditAPI-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. 
Enter the **CyberArkAuditUsername**, **CyberArkAuditPassword**, **CyberArkAuditServerURL** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the CyberArk Audit data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CyberArkAudit-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CyberArkXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.10.\n\n\tf. Select a location for new resources. 
For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tCyberArkAuditUsername\n\t\tCyberArkAuditPassword\n\t\tCyberArkAuditServerURL\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App are required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Audit REST API Connection details and Credentials"", ""description"": ""**OauthUsername**, **OauthPassword**, **WebAppID**, **AuditApiKey**, **IdentityEndpoint** and **AuditApiBaseUrl** are required for making API calls.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyberArkAudit/Data%20Connectors/CyberArkAudit_API_FunctionApp.json","true"
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.""}, {""title"": """", ""description"": "">**NOTE:** API authorization key(s) or token(s) are securely stored in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the CyberArk Audit SIEM Integration**\n\n Follow the [instructions](https://docs.cyberark.com/audit/latest/en/Content/Audit/isp_Microsoft_Sentinel.htm?tocpath=SIEM%20integrations%7C_____3) to obtain connection details and credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the CyberArk Audit data connector, have the Workspace Name and Workspace Location (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""WorkspaceLocation""], ""label"": ""Workspace Location""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the CyberArk Audit data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CyberArkAuditAPI-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. 
Enter the **CyberArkAuditUsername**, **CyberArkAuditPassword**, **CyberArkAuditServerURL** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the CyberArk Audit data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CyberArkAudit-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CyberArkXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.10.\n\n\tf. Select a location for new resources. 
For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tCyberArkAuditUsername\n\t\tCyberArkAuditPassword\n\t\tCyberArkAuditServerURL\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App are required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Audit REST API Connection details and Credentials"", ""description"": ""**OauthUsername**, **OauthPassword**, **WebAppID**, **AuditApiKey**, **IdentityEndpoint** and **AuditApiBaseUrl** are required for making API calls.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyberArkAudit/Data%20Connectors/CyberArkAudit_API_FunctionApp.json","true"
The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Azure Blob Storage API to pull logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**CyberArkEPM**](https://aka.ms/sentinel-CyberArkEPM-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the CyberArk EPM API**\n\n Follow the instructions to obtain the credentials.\n\n1. 
Use Username and Password for your CyberArk EPM account.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the CyberArk EPM data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the CyberArk EPM data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CyberArkEPMAPI-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **CyberArkEPMUsername**, **CyberArkEPMPassword**, **CyberArkEPMServerURL** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the CyberArk EPM data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-CyberArkEPMAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CyberArkXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.10.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tCyberArkEPMUsername\n\t\tCyberArkEPMPassword\n\t\tCyberArkEPMServerURL\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**CyberArkEPMUsername**, **CyberArkEPMPassword** and **CyberArkEPMServerURL** are required for making API calls.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyberArkEPM/Data%20Connectors/CyberArkEPM_API_FunctionApp.json","true" -"","CybersecurityMaturityModelCertification(CMMC)2.0","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CybersecurityMaturityModelCertification%28CMMC%292.0","azuresentinel","azure-sentinel-solution-cmmcv2","2022-01-06","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","Cybersixgill-Actionable-Alerts","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cybersixgill-Actionable-Alerts","cybersixgill1657701397011","azure-sentinel-cybersixgill-actionable-alerts","2023-02-27","2024-09-24","","Cybersixgill","Partner","https://www.cybersixgill.com/","","domains","","","","","","","false","","false" -"CyberSixgill_Alerts_CL","Cybersixgill-Actionable-Alerts","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cybersixgill-Actionable-Alerts","cybersixgill1657701397011","azure-sentinel-cybersixgill-actionable-alerts","2023-02-27","2024-09-24","","Cybersixgill","Partner","https://www.cybersixgill.com/","","domains","CybersixgillActionableAlerts","Cybersixgill","Cybersixgill Actionable Alerts","Actionable alerts provide customized alerts based on configured assets","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Cybersixgill API to pull Alerts into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cybersixgill Actionable Alerts data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/senitnel-cybersixgill-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **Client ID**, **Client Secret**, **TimeInterval** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cybersixgill Actionable Alerts data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. 
Deploy a Function App**\n\n> NOTE:You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cybersixgill-Actionable-Alerts/Data%20Connectors/CybersixgillAlerts.zip?raw=true) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CybersixgillAlertsXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. 
In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tClientID\n\t\tClientSecret\n\t\tPolling\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Client_ID** and **Client_Secret** are required for making API calls.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cybersixgill-Actionable-Alerts/Data%20Connectors/Cybersixgill_FunctionApp.json","true" -"","Cyble Vision","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyble%20Vision","cybleinc1737472004964","cybleinc1737472004964-azure-sentinel-offerid","2025-05-05","","","Cyble Support","Partner","https://cyble.com/talk-to-sales/","","domains","","","","","","","false","","false" -"CybleVisionAlerts_CL","Cyble Vision","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyble%20Vision","cybleinc1737472004964","cybleinc1737472004964-azure-sentinel-offerid","2025-05-05","","","Cyble Support","Partner","https://cyble.com/talk-to-sales/","","domains","CybleVisionAlerts","Cyble","Cyble Vision Alerts","The **Cyble Vision Alerts** CCF Data Connector enables Ingestion of Threat Alerts from Cyble Vision into Microsoft Sentinel using the Codeless Connector Framework Connector. It collects alert data via API, normalizes it, and stores it in a custom table for advanced detection, correlation, and response.","[{""title"": ""Step 1 - Generating API Token from Cyble Platform"", ""description"": ""Navigate to [Cyble Platform](https://cyble.ai/utilities/access-apis) and log in using your Cyble Vision credentials.\n\nOnce logged in, go to the left-hand panel and scroll down to **Utilities**. Click on **Access APIs**. On the top-right corner of the page, click the **+ (Add)** icon to generate a new API key. Provide an alias (a friendly name for your key) and click **Generate**. 
Copy the generated API token and store it securely.""}, {""title"": ""STEP 2 - Configure the Data Connector"", ""description"": ""Return to Microsoft Sentinel and open the **Cyble Vision Alerts** data connector configuration page. Paste your Cyble API Token into the **API Token** field under 'API Details'."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""Enter your API Token"", ""type"": ""password"", ""name"": ""ApiToken""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Query Interval (in minutes)"", ""placeholder"": ""Enter Time in Minutes (e.g., 10)"", ""type"": ""Textbox"", ""name"": ""querywindow""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Cyble Vision API token"", ""description"": ""An API Token from Cyble Vision Platform is required.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyble%20Vision/Data%20Connectors/CybleVisionAlerts_CCF/CybleVisionAlerts_DataConnectorDefinition.json","true" -"","Cyborg Security HUNTER","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyborg%20Security%20HUNTER","cyborgsecurityinc1689265652101","azure-sentinel-solution-cyborgsecurity-hunter","2023-07-03","2023-09-22","","Cyborg Security","Partner","https://hunter.cyborgsecurity.io/customer-support","","domains","","","","","","","false","","false" -"SecurityEvent","Cyborg Security 
HUNTER","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyborg%20Security%20HUNTER","cyborgsecurityinc1689265652101","azure-sentinel-solution-cyborgsecurity-hunter","2023-07-03","2023-09-22","","Cyborg Security","Partner","https://hunter.cyborgsecurity.io/customer-support","","domains","CyborgSecurity_HUNTER","Cyborg Security","Cyborg Security HUNTER Hunt Packages","Cyborg Security is a leading provider of advanced threat hunting solutions, with a mission to empower organizations with cutting-edge technology and collaborative tools to proactively detect and respond to cyber threats. Cyborg Security's flagship offering, the HUNTER Platform, combines powerful analytics, curated threat hunting content, and comprehensive hunt management capabilities to create a dynamic ecosystem for effective threat hunting operations.

Follow the steps to gain access to Cyborg Security's Community and set up the 'Open in Tool' capabilities in the HUNTER Platform.","[{""instructions"": [{""parameters"": {""text"": ""Use the following link to find your Azure Tenant ID How to find your Azure Active Directory tenant ID"", ""visible"": true, ""inline"": true}, ""type"": ""InfoMessage""}, {""parameters"": {""fillWith"": [""workspaceName""], ""label"": ""ResourceGroupName & WorkspaceName"", ""value"": ""{0}""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""WorkspaceID"", ""value"": ""{0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""1. Sign up for Cyborg Security's HUNTER Community Account"", ""description"": ""Cyborg Security offers Community Members access to a subset of the Emerging Threat Collections and hunt packages.\n\nCreate a Free Community Account to get access to Cyborg Security's Hunt Packages: [Sign Up Now!](https://www.cyborgsecurity.com/user-account-creation/)""}, {""title"": ""2. Configure the Open in Tool Feature"", ""description"": ""\n\n1. Navigate to the [Environment](https://hunter.cyborgsecurity.io/environment) section of the HUNTER Platform.\n2. Fill in the **Root URI** of your environment in the section labeled **Microsoft Sentinel**. Replace the placeholders with the IDs and Names of your Subscription, Resource Groups and Workspaces.\n\n https[]()://portal.azure.com#@**AzureTenantID**/blade/Microsoft_OperationsManagementSuite_Workspace/Logs.ReactView/resourceId/%2Fsubscriptions%2F**AzureSubscriptionID**%2Fresourcegroups%2F**ResourceGroupName**%2Fproviders%2Fmicrosoft.operationalinsights%2Fworkspaces%2F<**WorkspaceName**>/\n3. Click **Save**.""}, {""title"": ""3. 
Execute a HUNTER hunt package in Microsoft Sentinel"", ""description"": ""\n\nIdentify a Cyborg Security HUNTER hunt package to deploy and use the **Open In Tool** button to quickly open Microsoft Sentinel and stage the hunting content.\n\n![image](https://7924572.fs1.hubspotusercontent-na1.net/hubfs/7924572/HUNTER/Screenshots/openintool-ms-new.png)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyborg%20Security%20HUNTER/Data%20Connectors/CyborgSecurity_HUNTER.json","true" -"","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyera1658314682323","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","","","","","","","false","","false" -"CyeraAssets_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyera1658314682323","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","CyeraDSPMCCF","Cyera Inc","Cyera DSPM Microsoft Sentinel Data Connector","The [Cyera DSPM](https://api.cyera.io/) data connector allows you to connect to your Cyera's DSPM tenant and ingesting Classifications, Assets, Issues, and Identity Resources/Definitions into Microsoft Sentinel. 
The data connector is built on Microsoft Sentinel's Codeless Connector Framework and uses the Cyera's API to fetch Cyera's [DSPM Telemetry](https://www.cyera.com/) once received can be correlated with security events creating custom columns so that queries don't need to parse it again, thus resulting in better performance.","[{""description"": ""Connect to your Cyera DSPM tenenant via Personal Access Tokens"", ""instructions"": [{""parameters"": {""label"": ""Cyera Personal Access Token Client ID"", ""name"": ""clientId"", ""placeholder"": ""client_id"", ""type"": ""text""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""Cyera Personal Access Token Secret Key"", ""name"": ""clientSecret"", ""placeholder"": ""secret_key"", ""type"": ""password""}, ""type"": ""Textbox""}, {""parameters"": {""connectLabel"": ""Connect"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Cyera DSPM Authentication""}]","{""resourceProvider"": [{""permissionsDisplayText"": ""Read and Write permissions are required."", ""provider"": ""Microsoft.OperationalInsights/workspaces"", ""providerDisplayName"": ""Workspace"", ""requiredPermissions"": {""action"": false, ""delete"": true, ""read"": true, ""write"": true}, ""scope"": ""Workspace""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_CCF/CyeraDSPMLogs_ConnectorDefinitionCCF.json","true" -"CyeraAssets_MS_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyera1658314682323","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","CyeraDSPMCCF","Cyera Inc","Cyera DSPM Microsoft Sentinel Data Connector","The [Cyera DSPM](https://api.cyera.io/) data connector allows you to connect to your Cyera's DSPM tenant and ingesting Classifications, Assets, Issues, and Identity Resources/Definitions into Microsoft Sentinel. 
The data connector is built on Microsoft Sentinel's Codeless Connector Framework and uses the Cyera's API to fetch Cyera's [DSPM Telemetry](https://www.cyera.com/) once received can be correlated with security events creating custom columns so that queries don't need to parse it again, thus resulting in better performance.","[{""description"": ""Connect to your Cyera DSPM tenenant via Personal Access Tokens"", ""instructions"": [{""parameters"": {""label"": ""Cyera Personal Access Token Client ID"", ""name"": ""clientId"", ""placeholder"": ""client_id"", ""type"": ""text""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""Cyera Personal Access Token Secret Key"", ""name"": ""clientSecret"", ""placeholder"": ""secret_key"", ""type"": ""password""}, ""type"": ""Textbox""}, {""parameters"": {""connectLabel"": ""Connect"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Cyera DSPM Authentication""}]","{""resourceProvider"": [{""permissionsDisplayText"": ""Read and Write permissions are required."", ""provider"": ""Microsoft.OperationalInsights/workspaces"", ""providerDisplayName"": ""Workspace"", ""requiredPermissions"": {""action"": false, ""delete"": true, ""read"": true, ""write"": true}, ""scope"": ""Workspace""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_CCF/CyeraDSPMLogs_ConnectorDefinitionCCF.json","true" -"CyeraClassifications_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyera1658314682323","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","CyeraDSPMCCF","Cyera Inc","Cyera DSPM Microsoft Sentinel Data Connector","The [Cyera DSPM](https://api.cyera.io/) data connector allows you to connect to your Cyera's DSPM tenant and ingesting Classifications, Assets, Issues, and Identity Resources/Definitions into Microsoft Sentinel. 
The data connector is built on Microsoft Sentinel's Codeless Connector Framework and uses the Cyera's API to fetch Cyera's [DSPM Telemetry](https://www.cyera.com/) once received can be correlated with security events creating custom columns so that queries don't need to parse it again, thus resulting in better performance.","[{""description"": ""Connect to your Cyera DSPM tenenant via Personal Access Tokens"", ""instructions"": [{""parameters"": {""label"": ""Cyera Personal Access Token Client ID"", ""name"": ""clientId"", ""placeholder"": ""client_id"", ""type"": ""text""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""Cyera Personal Access Token Secret Key"", ""name"": ""clientSecret"", ""placeholder"": ""secret_key"", ""type"": ""password""}, ""type"": ""Textbox""}, {""parameters"": {""connectLabel"": ""Connect"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Cyera DSPM Authentication""}]","{""resourceProvider"": [{""permissionsDisplayText"": ""Read and Write permissions are required."", ""provider"": ""Microsoft.OperationalInsights/workspaces"", ""providerDisplayName"": ""Workspace"", ""requiredPermissions"": {""action"": false, ""delete"": true, ""read"": true, ""write"": true}, ""scope"": ""Workspace""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_CCF/CyeraDSPMLogs_ConnectorDefinitionCCF.json","true" -"CyeraIdentities_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyera1658314682323","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","CyeraDSPMCCF","Cyera Inc","Cyera DSPM Microsoft Sentinel Data Connector","The [Cyera DSPM](https://api.cyera.io/) data connector allows you to connect to your Cyera's DSPM tenant and ingesting Classifications, Assets, Issues, and Identity Resources/Definitions into Microsoft Sentinel. 
The data connector is built on Microsoft Sentinel's Codeless Connector Framework and uses the Cyera's API to fetch Cyera's [DSPM Telemetry](https://www.cyera.com/) once received can be correlated with security events creating custom columns so that queries don't need to parse it again, thus resulting in better performance.","[{""description"": ""Connect to your Cyera DSPM tenenant via Personal Access Tokens"", ""instructions"": [{""parameters"": {""label"": ""Cyera Personal Access Token Client ID"", ""name"": ""clientId"", ""placeholder"": ""client_id"", ""type"": ""text""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""Cyera Personal Access Token Secret Key"", ""name"": ""clientSecret"", ""placeholder"": ""secret_key"", ""type"": ""password""}, ""type"": ""Textbox""}, {""parameters"": {""connectLabel"": ""Connect"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Cyera DSPM Authentication""}]","{""resourceProvider"": [{""permissionsDisplayText"": ""Read and Write permissions are required."", ""provider"": ""Microsoft.OperationalInsights/workspaces"", ""providerDisplayName"": ""Workspace"", ""requiredPermissions"": {""action"": false, ""delete"": true, ""read"": true, ""write"": true}, ""scope"": ""Workspace""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_CCF/CyeraDSPMLogs_ConnectorDefinitionCCF.json","true" -"CyeraIssues_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyera1658314682323","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","CyeraDSPMCCF","Cyera Inc","Cyera DSPM Microsoft Sentinel Data Connector","The [Cyera DSPM](https://api.cyera.io/) data connector allows you to connect to your Cyera's DSPM tenant and ingesting Classifications, Assets, Issues, and Identity Resources/Definitions into Microsoft Sentinel. 
The data connector is built on Microsoft Sentinel's Codeless Connector Framework and uses the Cyera's API to fetch Cyera's [DSPM Telemetry](https://www.cyera.com/) once received can be correlated with security events creating custom columns so that queries don't need to parse it again, thus resulting in better performance.","[{""description"": ""Connect to your Cyera DSPM tenenant via Personal Access Tokens"", ""instructions"": [{""parameters"": {""label"": ""Cyera Personal Access Token Client ID"", ""name"": ""clientId"", ""placeholder"": ""client_id"", ""type"": ""text""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""Cyera Personal Access Token Secret Key"", ""name"": ""clientSecret"", ""placeholder"": ""secret_key"", ""type"": ""password""}, ""type"": ""Textbox""}, {""parameters"": {""connectLabel"": ""Connect"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Cyera DSPM Authentication""}]","{""resourceProvider"": [{""permissionsDisplayText"": ""Read and Write permissions are required."", ""provider"": ""Microsoft.OperationalInsights/workspaces"", ""providerDisplayName"": ""Workspace"", ""requiredPermissions"": {""action"": false, ""delete"": true, ""read"": true, ""write"": true}, ""scope"": ""Workspace""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_CCF/CyeraDSPMLogs_ConnectorDefinitionCCF.json","true" -"CyeraAssets_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyera1658314682323","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","CyeraFunctionsConnector","Cyera Inc","Cyera DSPM Azure Functions Microsoft Sentinel Data Connector","The **Cyera DSPM Azure Function Connector** enables seamless ingestion of Cyera’s **Data Security Posture Management (DSPM)** telemetry — *Assets*, *Identities*, *Issues*, and *Classifications* — into 
**Microsoft Sentinel**.\n\nThis connector uses an **Azure Function App** to call Cyera’s REST API on a schedule, fetch the latest DSPM telemetry, and send it to Microsoft Sentinel through the **Azure Monitor Logs Ingestion API** via a **Data Collection Endpoint (DCE)** and **Data Collection Rule (DCR, kind: Direct)** — no agents required.\n\n**Tables created/used**\n\n| Entity | Table | Purpose |\n|---|---|---|\n| Assets | `CyeraAssets_CL` | Raw asset metadata and data-store context |\n| Identities | `CyeraIdentities_CL` | Identity definitions and sensitivity context |\n| Issues | `CyeraIssues_CL` | Findings and remediation details |\n| Classifications | `CyeraClassifications_CL` | Data class & sensitivity definitions |\n| MS View | `CyeraAssets_MS_CL` | Normalized asset view for dashboards |\n\n> **Note:** This v7 connector supersedes the earlier CCF-based approach and aligns with Microsoft’s recommended Direct ingestion path for Microsoft Sentinel.","[{""title"": ""Note"", ""description"": "">**NOTE:** This connector uses an **Azure Function App** and the **Azure Monitor Logs Ingestion API** (DCE + DCR, kind: Direct). Function runtime and data egress may incur charges. See [Azure Functions pricing](https://azure.microsoft.com/pricing/details/functions/).""}, {""title"": ""Optional Step"", ""description"": "">**(Optional)** Store Cyera API credentials in **Azure Key Vault** and reference them from the Function App. 
See [Key Vault references](https://learn.microsoft.com/azure/app-service/app-service-key-vault-references).""}, {""title"": ""STEP 1 \u2014 Prepare Cyera API Access"", ""description"": ""1) Generate a **Personal Access Token** [Generating Personal Access Token](https://support.cyera.io/hc/en-us/articles/19446274608919-Personal-and-API-Tokens) in your Cyera tenant.\\n2) Note **API Base URL**, **Client ID**, and **Client Secret**.""}, {""title"": ""STEP 2 \u2014 Choose ONE deployment option"", ""description"": ""> Before deploying, have these values handy:"", ""instructions"": [{""parameters"": {""fillWith"": [""CyeraDSPMConnector""], ""label"": ""Cyera Function Connector Name""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""{{workspace-location}}""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""{{workspace-location}}""], ""label"": ""Workspace Location""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""https://api.cyera.io""], ""label"": ""Cyera Base URL""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""CyeraClientID""], ""label"": ""Cyera Personal Access Token Client ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""CyeraSecret""], ""label"": ""Cyera Personal Access Token Secret""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1"", ""description"": ""**Option 1 - Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the Cyera DSPM Functions and all required resources to support the connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/{{deployment-template-uri)\n2. Select the preferred **FunctionName** and **Workspace Name**. \n3. Enter the **Workspace Location**, **Cyera API Base Url**, **Personal Access Token Client ID**, and **Personal Access Token Secret**. 
\n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 \u2014 Manual Deployment"", ""description"": ""Follow the [install pack\u2019s step-by-step guide]({{userguide-url}}.\\n\\n1) Create/update the 5 custom tables, data collection rule with format `sentinel-dce-`, and data collection endpoint with format `sentinel-dcr-` using the scripts in [install-pack-v0_7_0/scripts]({{deployment-script-zip-url}}).\\n2) Deploy the Azure Function from the repo`s Function folder (Timer-trigger; schedule typically 5\u201315 minutes).\\n3) Configure Function App settings:\\n - `CyeraBaseUrl` \u2014 Cyera API Base URL\\n - `CyeraClientId` \u2014 Client ID (PAT)\\n - `CyeraSecret` \u2014 Client Secret (PAT)\\n - `DCR_IMMUTABLE_ID` \u2014 DCR immutable ID\\n - `DCE_ENDPOINT` \u2014 Logs ingestion endpoint URL\\n - `STREAM_ASSETS`=`Custom-CyeraAssets`, `STREAM_IDENTITIES`=`Custom-CyeraIdentities`, `STREAM_ISSUES`=`Custom-CyeraIssues`, `STREAM_CLASSIFICATIONS`=`Custom-CyeraClassifications`\\n4) Save and Start the Function App.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_Functions/FunctionAppDC.json","true" 
-"CyeraAssets_MS_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyera1658314682323","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","CyeraFunctionsConnector","Cyera Inc","Cyera DSPM Azure Functions Microsoft Sentinel Data Connector","The **Cyera DSPM Azure Function Connector** enables seamless ingestion of Cyera’s **Data Security Posture Management (DSPM)** telemetry — *Assets*, *Identities*, *Issues*, and *Classifications* — into **Microsoft Sentinel**.\n\nThis connector uses an **Azure Function App** to call Cyera’s REST API on a schedule, fetch the latest DSPM telemetry, and send it to Microsoft Sentinel through the **Azure Monitor Logs Ingestion API** via a **Data Collection Endpoint (DCE)** and **Data Collection Rule (DCR, kind: Direct)** — no agents required.\n\n**Tables created/used**\n\n| Entity | Table | Purpose |\n|---|---|---|\n| Assets | `CyeraAssets_CL` | Raw asset metadata and data-store context |\n| Identities | `CyeraIdentities_CL` | Identity definitions and sensitivity context |\n| Issues | `CyeraIssues_CL` | Findings and remediation details |\n| Classifications | `CyeraClassifications_CL` | Data class & sensitivity definitions |\n| MS View | `CyeraAssets_MS_CL` | Normalized asset view for dashboards |\n\n> **Note:** This v7 connector supersedes the earlier CCF-based approach and aligns with Microsoft’s recommended Direct ingestion path for Microsoft Sentinel.","[{""title"": ""Note"", ""description"": "">**NOTE:** This connector uses an **Azure Function App** and the **Azure Monitor Logs Ingestion API** (DCE + DCR, kind: Direct). Function runtime and data egress may incur charges. See [Azure Functions pricing](https://azure.microsoft.com/pricing/details/functions/).""}, {""title"": ""Optional Step"", ""description"": "">**(Optional)** Store Cyera API credentials in **Azure Key Vault** and reference them from the Function App. 
See [Key Vault references](https://learn.microsoft.com/azure/app-service/app-service-key-vault-references).""}, {""title"": ""STEP 1 \u2014 Prepare Cyera API Access"", ""description"": ""1) Generate a **Personal Access Token** [Generating Personal Access Token](https://support.cyera.io/hc/en-us/articles/19446274608919-Personal-and-API-Tokens) in your Cyera tenant.\\n2) Note **API Base URL**, **Client ID**, and **Client Secret**.""}, {""title"": ""STEP 2 \u2014 Choose ONE deployment option"", ""description"": ""> Before deploying, have these values handy:"", ""instructions"": [{""parameters"": {""fillWith"": [""CyeraDSPMConnector""], ""label"": ""Cyera Function Connector Name""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""{{workspace-location}}""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""{{workspace-location}}""], ""label"": ""Workspace Location""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""https://api.cyera.io""], ""label"": ""Cyera Base URL""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""CyeraClientID""], ""label"": ""Cyera Personal Access Token Client ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""CyeraSecret""], ""label"": ""Cyera Personal Access Token Secret""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1"", ""description"": ""**Option 1 - Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the Cyera DSPM Functions and all required resources to support the connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/{{deployment-template-uri)\n2. Select the preferred **FunctionName** and **Workspace Name**. \n3. Enter the **Workspace Location**, **Cyera API Base Url**, **Personal Access Token Client ID**, and **Personal Access Token Secret**. 
\n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 \u2014 Manual Deployment"", ""description"": ""Follow the [install pack\u2019s step-by-step guide]({{userguide-url}}.\\n\\n1) Create/update the 5 custom tables, data collection rule with format `sentinel-dce-`, and data collection endpoint with format `sentinel-dcr-` using the scripts in [install-pack-v0_7_0/scripts]({{deployment-script-zip-url}}).\\n2) Deploy the Azure Function from the repo`s Function folder (Timer-trigger; schedule typically 5\u201315 minutes).\\n3) Configure Function App settings:\\n - `CyeraBaseUrl` \u2014 Cyera API Base URL\\n - `CyeraClientId` \u2014 Client ID (PAT)\\n - `CyeraSecret` \u2014 Client Secret (PAT)\\n - `DCR_IMMUTABLE_ID` \u2014 DCR immutable ID\\n - `DCE_ENDPOINT` \u2014 Logs ingestion endpoint URL\\n - `STREAM_ASSETS`=`Custom-CyeraAssets`, `STREAM_IDENTITIES`=`Custom-CyeraIdentities`, `STREAM_ISSUES`=`Custom-CyeraIssues`, `STREAM_CLASSIFICATIONS`=`Custom-CyeraClassifications`\\n4) Save and Start the Function App.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_Functions/FunctionAppDC.json","true" 
-"CyeraClassifications_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyera1658314682323","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","CyeraFunctionsConnector","Cyera Inc","Cyera DSPM Azure Functions Microsoft Sentinel Data Connector","The **Cyera DSPM Azure Function Connector** enables seamless ingestion of Cyera’s **Data Security Posture Management (DSPM)** telemetry — *Assets*, *Identities*, *Issues*, and *Classifications* — into **Microsoft Sentinel**.\n\nThis connector uses an **Azure Function App** to call Cyera’s REST API on a schedule, fetch the latest DSPM telemetry, and send it to Microsoft Sentinel through the **Azure Monitor Logs Ingestion API** via a **Data Collection Endpoint (DCE)** and **Data Collection Rule (DCR, kind: Direct)** — no agents required.\n\n**Tables created/used**\n\n| Entity | Table | Purpose |\n|---|---|---|\n| Assets | `CyeraAssets_CL` | Raw asset metadata and data-store context |\n| Identities | `CyeraIdentities_CL` | Identity definitions and sensitivity context |\n| Issues | `CyeraIssues_CL` | Findings and remediation details |\n| Classifications | `CyeraClassifications_CL` | Data class & sensitivity definitions |\n| MS View | `CyeraAssets_MS_CL` | Normalized asset view for dashboards |\n\n> **Note:** This v7 connector supersedes the earlier CCF-based approach and aligns with Microsoft’s recommended Direct ingestion path for Microsoft Sentinel.","[{""title"": ""Note"", ""description"": "">**NOTE:** This connector uses an **Azure Function App** and the **Azure Monitor Logs Ingestion API** (DCE + DCR, kind: Direct). Function runtime and data egress may incur charges. 
See [Azure Functions pricing](https://azure.microsoft.com/pricing/details/functions/).""}, {""title"": ""Optional Step"", ""description"": "">**(Optional)** Store Cyera API credentials in **Azure Key Vault** and reference them from the Function App. See [Key Vault references](https://learn.microsoft.com/azure/app-service/app-service-key-vault-references).""}, {""title"": ""STEP 1 \u2014 Prepare Cyera API Access"", ""description"": ""1) Generate a **Personal Access Token** [Generating Personal Access Token](https://support.cyera.io/hc/en-us/articles/19446274608919-Personal-and-API-Tokens) in your Cyera tenant.\\n2) Note **API Base URL**, **Client ID**, and **Client Secret**.""}, {""title"": ""STEP 2 \u2014 Choose ONE deployment option"", ""description"": ""> Before deploying, have these values handy:"", ""instructions"": [{""parameters"": {""fillWith"": [""CyeraDSPMConnector""], ""label"": ""Cyera Function Connector Name""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""{{workspace-location}}""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""{{workspace-location}}""], ""label"": ""Workspace Location""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""https://api.cyera.io""], ""label"": ""Cyera Base URL""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""CyeraClientID""], ""label"": ""Cyera Personal Access Token Client ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""CyeraSecret""], ""label"": ""Cyera Personal Access Token Secret""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1"", ""description"": ""**Option 1 - Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the Cyera DSPM Functions and all required resources to support the connector.\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/{{deployment-template-uri)\n2. Select the preferred **FunctionName** and **Workspace Name**. \n3. Enter the **Workspace Location**, **Cyera API Base Url**, **Personal Access Token Client ID**, and **Personal Access Token Secret**. \n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 \u2014 Manual Deployment"", ""description"": ""Follow the [install pack\u2019s step-by-step guide]({{userguide-url}}.\\n\\n1) Create/update the 5 custom tables, data collection rule with format `sentinel-dce-`, and data collection endpoint with format `sentinel-dcr-` using the scripts in [install-pack-v0_7_0/scripts]({{deployment-script-zip-url}}).\\n2) Deploy the Azure Function from the repo`s Function folder (Timer-trigger; schedule typically 5\u201315 minutes).\\n3) Configure Function App settings:\\n - `CyeraBaseUrl` \u2014 Cyera API Base URL\\n - `CyeraClientId` \u2014 Client ID (PAT)\\n - `CyeraSecret` \u2014 Client Secret (PAT)\\n - `DCR_IMMUTABLE_ID` \u2014 DCR immutable ID\\n - `DCE_ENDPOINT` \u2014 Logs ingestion endpoint URL\\n - `STREAM_ASSETS`=`Custom-CyeraAssets`, `STREAM_IDENTITIES`=`Custom-CyeraIdentities`, `STREAM_ISSUES`=`Custom-CyeraIssues`, `STREAM_CLASSIFICATIONS`=`Custom-CyeraClassifications`\\n4) Save and Start the Function App.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": 
""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_Functions/FunctionAppDC.json","true" -"CyeraIdentities_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyera1658314682323","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","CyeraFunctionsConnector","Cyera Inc","Cyera DSPM Azure Functions Microsoft Sentinel Data Connector","The **Cyera DSPM Azure Function Connector** enables seamless ingestion of Cyera’s **Data Security Posture Management (DSPM)** telemetry — *Assets*, *Identities*, *Issues*, and *Classifications* — into **Microsoft Sentinel**.\n\nThis connector uses an **Azure Function App** to call Cyera’s REST API on a schedule, fetch the latest DSPM telemetry, and send it to Microsoft Sentinel through the **Azure Monitor Logs Ingestion API** via a **Data Collection Endpoint (DCE)** and **Data Collection Rule (DCR, kind: Direct)** — no agents required.\n\n**Tables created/used**\n\n| Entity | Table | Purpose |\n|---|---|---|\n| Assets | `CyeraAssets_CL` | Raw asset metadata and data-store context |\n| Identities | `CyeraIdentities_CL` | Identity definitions and sensitivity context |\n| Issues | `CyeraIssues_CL` | Findings and remediation details |\n| Classifications | `CyeraClassifications_CL` | Data class & sensitivity definitions |\n| MS View | `CyeraAssets_MS_CL` | Normalized asset view for dashboards |\n\n> **Note:** This v7 connector supersedes the earlier CCF-based approach and aligns with Microsoft’s recommended Direct ingestion path for Microsoft Sentinel.","[{""title"": ""Note"", ""description"": "">**NOTE:** This connector uses an **Azure Function App** and the **Azure Monitor Logs Ingestion API** (DCE + DCR, kind: Direct). 
Function runtime and data egress may incur charges. See [Azure Functions pricing](https://azure.microsoft.com/pricing/details/functions/).""}, {""title"": ""Optional Step"", ""description"": "">**(Optional)** Store Cyera API credentials in **Azure Key Vault** and reference them from the Function App. See [Key Vault references](https://learn.microsoft.com/azure/app-service/app-service-key-vault-references).""}, {""title"": ""STEP 1 \u2014 Prepare Cyera API Access"", ""description"": ""1) Generate a **Personal Access Token** [Generating Personal Access Token](https://support.cyera.io/hc/en-us/articles/19446274608919-Personal-and-API-Tokens) in your Cyera tenant.\\n2) Note **API Base URL**, **Client ID**, and **Client Secret**.""}, {""title"": ""STEP 2 \u2014 Choose ONE deployment option"", ""description"": ""> Before deploying, have these values handy:"", ""instructions"": [{""parameters"": {""fillWith"": [""CyeraDSPMConnector""], ""label"": ""Cyera Function Connector Name""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""{{workspace-location}}""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""{{workspace-location}}""], ""label"": ""Workspace Location""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""https://api.cyera.io""], ""label"": ""Cyera Base URL""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""CyeraClientID""], ""label"": ""Cyera Personal Access Token Client ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""CyeraSecret""], ""label"": ""Cyera Personal Access Token Secret""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1"", ""description"": ""**Option 1 - Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the Cyera DSPM Functions and all required resources to support the connector.\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/{{deployment-template-uri)\n2. Select the preferred **FunctionName** and **Workspace Name**. \n3. Enter the **Workspace Location**, **Cyera API Base Url**, **Personal Access Token Client ID**, and **Personal Access Token Secret**. \n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 \u2014 Manual Deployment"", ""description"": ""Follow the [install pack\u2019s step-by-step guide]({{userguide-url}}.\\n\\n1) Create/update the 5 custom tables, data collection rule with format `sentinel-dce-`, and data collection endpoint with format `sentinel-dcr-` using the scripts in [install-pack-v0_7_0/scripts]({{deployment-script-zip-url}}).\\n2) Deploy the Azure Function from the repo`s Function folder (Timer-trigger; schedule typically 5\u201315 minutes).\\n3) Configure Function App settings:\\n - `CyeraBaseUrl` \u2014 Cyera API Base URL\\n - `CyeraClientId` \u2014 Client ID (PAT)\\n - `CyeraSecret` \u2014 Client Secret (PAT)\\n - `DCR_IMMUTABLE_ID` \u2014 DCR immutable ID\\n - `DCE_ENDPOINT` \u2014 Logs ingestion endpoint URL\\n - `STREAM_ASSETS`=`Custom-CyeraAssets`, `STREAM_IDENTITIES`=`Custom-CyeraIdentities`, `STREAM_ISSUES`=`Custom-CyeraIssues`, `STREAM_CLASSIFICATIONS`=`Custom-CyeraClassifications`\\n4) Save and Start the Function App.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": 
""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_Functions/FunctionAppDC.json","true" -"CyeraIssues_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyera1658314682323","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","CyeraFunctionsConnector","Cyera Inc","Cyera DSPM Azure Functions Microsoft Sentinel Data Connector","The **Cyera DSPM Azure Function Connector** enables seamless ingestion of Cyera’s **Data Security Posture Management (DSPM)** telemetry — *Assets*, *Identities*, *Issues*, and *Classifications* — into **Microsoft Sentinel**.\n\nThis connector uses an **Azure Function App** to call Cyera’s REST API on a schedule, fetch the latest DSPM telemetry, and send it to Microsoft Sentinel through the **Azure Monitor Logs Ingestion API** via a **Data Collection Endpoint (DCE)** and **Data Collection Rule (DCR, kind: Direct)** — no agents required.\n\n**Tables created/used**\n\n| Entity | Table | Purpose |\n|---|---|---|\n| Assets | `CyeraAssets_CL` | Raw asset metadata and data-store context |\n| Identities | `CyeraIdentities_CL` | Identity definitions and sensitivity context |\n| Issues | `CyeraIssues_CL` | Findings and remediation details |\n| Classifications | `CyeraClassifications_CL` | Data class & sensitivity definitions |\n| MS View | `CyeraAssets_MS_CL` | Normalized asset view for dashboards |\n\n> **Note:** This v7 connector supersedes the earlier CCF-based approach and aligns with Microsoft’s recommended Direct ingestion path for Microsoft Sentinel.","[{""title"": ""Note"", ""description"": "">**NOTE:** This connector uses an **Azure Function App** and the **Azure Monitor Logs Ingestion API** (DCE + DCR, kind: Direct). 
Function runtime and data egress may incur charges. See [Azure Functions pricing](https://azure.microsoft.com/pricing/details/functions/).""}, {""title"": ""Optional Step"", ""description"": "">**(Optional)** Store Cyera API credentials in **Azure Key Vault** and reference them from the Function App. See [Key Vault references](https://learn.microsoft.com/azure/app-service/app-service-key-vault-references).""}, {""title"": ""STEP 1 \u2014 Prepare Cyera API Access"", ""description"": ""1) Generate a **Personal Access Token** [Generating Personal Access Token](https://support.cyera.io/hc/en-us/articles/19446274608919-Personal-and-API-Tokens) in your Cyera tenant.\\n2) Note **API Base URL**, **Client ID**, and **Client Secret**.""}, {""title"": ""STEP 2 \u2014 Choose ONE deployment option"", ""description"": ""> Before deploying, have these values handy:"", ""instructions"": [{""parameters"": {""fillWith"": [""CyeraDSPMConnector""], ""label"": ""Cyera Function Connector Name""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""{{workspace-location}}""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""{{workspace-location}}""], ""label"": ""Workspace Location""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""https://api.cyera.io""], ""label"": ""Cyera Base URL""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""CyeraClientID""], ""label"": ""Cyera Personal Access Token Client ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""CyeraSecret""], ""label"": ""Cyera Personal Access Token Secret""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1"", ""description"": ""**Option 1 - Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the Cyera DSPM Functions and all required resources to support the connector.\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/{{deployment-template-uri)\n2. Select the preferred **FunctionName** and **Workspace Name**. \n3. Enter the **Workspace Location**, **Cyera API Base Url**, **Personal Access Token Client ID**, and **Personal Access Token Secret**. \n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 \u2014 Manual Deployment"", ""description"": ""Follow the [install pack\u2019s step-by-step guide]({{userguide-url}}.\\n\\n1) Create/update the 5 custom tables, data collection rule with format `sentinel-dce-`, and data collection endpoint with format `sentinel-dcr-` using the scripts in [install-pack-v0_7_0/scripts]({{deployment-script-zip-url}}).\\n2) Deploy the Azure Function from the repo`s Function folder (Timer-trigger; schedule typically 5\u201315 minutes).\\n3) Configure Function App settings:\\n - `CyeraBaseUrl` \u2014 Cyera API Base URL\\n - `CyeraClientId` \u2014 Client ID (PAT)\\n - `CyeraSecret` \u2014 Client Secret (PAT)\\n - `DCR_IMMUTABLE_ID` \u2014 DCR immutable ID\\n - `DCE_ENDPOINT` \u2014 Logs ingestion endpoint URL\\n - `STREAM_ASSETS`=`Custom-CyeraAssets`, `STREAM_IDENTITIES`=`Custom-CyeraIdentities`, `STREAM_ISSUES`=`Custom-CyeraIssues`, `STREAM_CLASSIFICATIONS`=`Custom-CyeraClassifications`\\n4) Save and Start the Function App.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": 
""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_Functions/FunctionAppDC.json","true" -"","Cyfirma Attack Surface","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-attack-surface","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","","","","","","","false","","false" -"CyfirmaASCertificatesAlerts_CL","Cyfirma Attack Surface","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-attack-surface","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaAttackSurfaceAlertsConnector","Microsoft","CYFIRMA Attack Surface","","[{""title"": ""CYFIRMA Attack Surface"", ""description"": ""Connect to CYFIRMA Attack Surface to ingest alerts into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into custom tables during ingestion. 
This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface/Data%20Connectors/CyfirmaASAlerts_ccp/CyfirmaASAlerts_DataConnectorDefinition.json","true" -"CyfirmaASCloudWeaknessAlerts_CL","Cyfirma Attack Surface","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-attack-surface","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaAttackSurfaceAlertsConnector","Microsoft","CYFIRMA Attack Surface","","[{""title"": ""CYFIRMA Attack Surface"", ""description"": ""Connect to CYFIRMA Attack Surface to ingest alerts into Microsoft Sentinel. 
This connector uses the DeCYFIR/DeTCT API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into custom tables during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface/Data%20Connectors/CyfirmaASAlerts_ccp/CyfirmaASAlerts_DataConnectorDefinition.json","true" -"CyfirmaASConfigurationAlerts_CL","Cyfirma Attack Surface","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-attack-surface","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaAttackSurfaceAlertsConnector","Microsoft","CYFIRMA 
Attack Surface","","[{""title"": ""CYFIRMA Attack Surface"", ""description"": ""Connect to CYFIRMA Attack Surface to ingest alerts into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into custom tables during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. 
False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface/Data%20Connectors/CyfirmaASAlerts_ccp/CyfirmaASAlerts_DataConnectorDefinition.json","true" -"CyfirmaASDomainIPReputationAlerts_CL","Cyfirma Attack Surface","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-attack-surface","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaAttackSurfaceAlertsConnector","Microsoft","CYFIRMA Attack Surface","","[{""title"": ""CYFIRMA Attack Surface"", ""description"": ""Connect to CYFIRMA Attack Surface to ingest alerts into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into custom tables during ingestion. 
This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface/Data%20Connectors/CyfirmaASAlerts_ccp/CyfirmaASAlerts_DataConnectorDefinition.json","true" -"CyfirmaASDomainIPVulnerabilityAlerts_CL","Cyfirma Attack Surface","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-attack-surface","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaAttackSurfaceAlertsConnector","Microsoft","CYFIRMA Attack Surface","","[{""title"": ""CYFIRMA Attack Surface"", ""description"": ""Connect to CYFIRMA Attack Surface to ingest alerts into Microsoft Sentinel. 
This connector uses the DeCYFIR/DeTCT API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into custom tables during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface/Data%20Connectors/CyfirmaASAlerts_ccp/CyfirmaASAlerts_DataConnectorDefinition.json","true" -"CyfirmaASOpenPortsAlerts_CL","Cyfirma Attack Surface","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-attack-surface","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaAttackSurfaceAlertsConnector","Microsoft","CYFIRMA 
Attack Surface","","[{""title"": ""CYFIRMA Attack Surface"", ""description"": ""Connect to CYFIRMA Attack Surface to ingest alerts into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into custom tables during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. 
False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface/Data%20Connectors/CyfirmaASAlerts_ccp/CyfirmaASAlerts_DataConnectorDefinition.json","true" -"","Cyfirma Brand Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-brand-intelligence","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","","","","","","","false","","false" -"CyfirmaBIDomainITAssetAlerts_CL","Cyfirma Brand Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-brand-intelligence","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaBrandIntelligenceAlertsDC","Microsoft","CYFIRMA Brand Intelligence","","[{""title"": ""CYFIRMA Brand Intelligence"", ""description"": ""Connect to CYFIRMA Brand Intelligence to ingest alerts data into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT Alerts API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into custom tables during ingestion. 
This enhances performance and efficiency by eliminating the need for query-time parsing."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence/Data%20Connectors/CyfirmaBIAlerts_ccp/CyfirmaBIAlerts_DataConnectorDefinition.json","true" -"CyfirmaBIExecutivePeopleAlerts_CL","Cyfirma Brand Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-brand-intelligence","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaBrandIntelligenceAlertsDC","Microsoft","CYFIRMA Brand Intelligence","","[{""title"": ""CYFIRMA Brand Intelligence"", ""description"": ""Connect to CYFIRMA Brand Intelligence to ingest alerts data into 
Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT Alerts API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into custom tables during ingestion. This enhances performance and efficiency by eliminating the need for query-time parsing."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence/Data%20Connectors/CyfirmaBIAlerts_ccp/CyfirmaBIAlerts_DataConnectorDefinition.json","true" -"CyfirmaBIMaliciousMobileAppsAlerts_CL","Cyfirma Brand 
Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-brand-intelligence","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaBrandIntelligenceAlertsDC","Microsoft","CYFIRMA Brand Intelligence","","[{""title"": ""CYFIRMA Brand Intelligence"", ""description"": ""Connect to CYFIRMA Brand Intelligence to ingest alerts data into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT Alerts API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into custom tables during ingestion. This enhances performance and efficiency by eliminating the need for query-time parsing."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. 
False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence/Data%20Connectors/CyfirmaBIAlerts_ccp/CyfirmaBIAlerts_DataConnectorDefinition.json","true" -"CyfirmaBIProductSolutionAlerts_CL","Cyfirma Brand Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-brand-intelligence","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaBrandIntelligenceAlertsDC","Microsoft","CYFIRMA Brand Intelligence","","[{""title"": ""CYFIRMA Brand Intelligence"", ""description"": ""Connect to CYFIRMA Brand Intelligence to ingest alerts data into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT Alerts API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into custom tables during ingestion. 
This enhances performance and efficiency by eliminating the need for query-time parsing."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence/Data%20Connectors/CyfirmaBIAlerts_ccp/CyfirmaBIAlerts_DataConnectorDefinition.json","true" -"CyfirmaBISocialHandlersAlerts_CL","Cyfirma Brand Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-brand-intelligence","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaBrandIntelligenceAlertsDC","Microsoft","CYFIRMA Brand Intelligence","","[{""title"": ""CYFIRMA Brand Intelligence"", ""description"": ""Connect to CYFIRMA Brand Intelligence to ingest alerts data into 
Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT Alerts API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into custom tables during ingestion. This enhances performance and efficiency by eliminating the need for query-time parsing."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence/Data%20Connectors/CyfirmaBIAlerts_ccp/CyfirmaBIAlerts_DataConnectorDefinition.json","true" -"","Cyfirma Compromised Accounts","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Compromised%20Accounts","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirmacompromisedaccounts","2025-05-15","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","","","","","","","false","","false" 
-"CyfirmaCompromisedAccounts_CL","Cyfirma Compromised Accounts","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Compromised%20Accounts","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirmacompromisedaccounts","2025-05-15","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaCompromisedAccountsDataConnector","Microsoft","CYFIRMA Compromised Accounts","The CYFIRMA Compromised Accounts data connector enables seamless log ingestion from the DeCYFIR/DeTCT API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR/DeTCT API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Compromised Accounts"", ""description"": ""The CYFIRMA Compromised Accounts Data Connector enables seamless log ingestion from the DeCYFIR/DeTCT API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR/DeTCT API to retrieve logs. Additionally, it supports DCR-based ingestion time transformations, which parse security data into a custom table during ingestion. 
This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""Setting it to true returns only data added since the last API call, while false returns all available data.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Compromised%20Accounts/Data%20Connectors/CyfirmaCompromisedAccounts_ccp/CyfirmaCompAcc_DataConnectorDefinition.json","true" -"","Cyfirma Cyber Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Cyber%20Intelligence","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-cyber-intelligence","2025-05-15","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","","","","","","","false","","false" -"CyfirmaCampaigns_CL","Cyfirma Cyber 
Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Cyber%20Intelligence","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-cyber-intelligence","2025-05-15","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaCyberIntelligenceDC","Microsoft","CYFIRMA Cyber Intelligence","The CYFIRMA Cyber Intelligence data connector enables seamless log ingestion from the DeCYFIR API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Cyber Intelligence"", ""description"": ""This connector provides the Indicators, Threat actors, Malware and Campaigns logs from CYFIRMA Cyber Intelligence. The connector uses the DeCYFIR API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""descriptionMarkdown"": ""This connector provides the Indicators, Threat actors, Malware and Campaigns logs from CYFIRMA Cyber Intelligence. The connector uses the DeCYFIR API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into a custom table during ingestion. 
This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Pull all IoC's Or Tailored IoC's"", ""placeholder"": ""All IoC's or Tailored IoC's"", ""type"": ""text"", ""name"": ""apiAll"", ""defaultValue"": ""false"", ""description"": ""Set to true to pull all IoC's, set to false to pull tailored IoC's""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""defaultValue"": ""false"", ""description"": ""Setting it to true returns only data added since the last API call, while false returns data from the last 24 hours.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Recommended Actions"", ""placeholder"": ""Recommended Action can be any one of:All/Monitor/Block"", ""type"": ""text"", ""name"": ""recommendedActions"", ""defaultValue"": ""All"", ""description"": ""Recommended Action can be any one of:All/Monitor/Block""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Threat Actor Associated"", ""placeholder"": ""Is any Threat Actor Associated with the IoC's"", ""type"": ""text"", ""name"": ""isThreatActorExists"", ""defaultValue"": ""false"", ""description"": ""Is any Threat Actor Associated with the IoC's""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": 
""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Cyber%20Intelligence/Data%20Connectors/CyfirmaCyberIntelligence_ccp/CyfirmaCyberIntel_DataConnectorDefinition.json","true" -"CyfirmaIndicators_CL","Cyfirma Cyber Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Cyber%20Intelligence","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-cyber-intelligence","2025-05-15","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaCyberIntelligenceDC","Microsoft","CYFIRMA Cyber Intelligence","The CYFIRMA Cyber Intelligence data connector enables seamless log ingestion from the DeCYFIR API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Cyber Intelligence"", ""description"": ""This connector provides the Indicators, Threat actors, Malware and Campaigns logs from CYFIRMA Cyber Intelligence. The connector uses the DeCYFIR API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""descriptionMarkdown"": ""This connector provides the Indicators, Threat actors, Malware and Campaigns logs from CYFIRMA Cyber Intelligence. 
The connector uses the DeCYFIR API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Pull all IoC's Or Tailored IoC's"", ""placeholder"": ""All IoC's or Tailored IoC's"", ""type"": ""text"", ""name"": ""apiAll"", ""defaultValue"": ""false"", ""description"": ""Set to true to pull all IoC's, set to false to pull tailored IoC's""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""defaultValue"": ""false"", ""description"": ""Setting it to true returns only data added since the last API call, while false returns data from the last 24 hours.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Recommended Actions"", ""placeholder"": ""Recommended Action can be any one of:All/Monitor/Block"", ""type"": ""text"", ""name"": ""recommendedActions"", ""defaultValue"": ""All"", ""description"": ""Recommended Action can be any one of:All/Monitor/Block""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Threat Actor Associated"", ""placeholder"": ""Is any Threat Actor Associated with the IoC's"", ""type"": ""text"", ""name"": ""isThreatActorExists"", ""defaultValue"": ""false"", ""description"": ""Is any Threat Actor Associated with the IoC's""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": 
{""connectLabel"": ""Connect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Cyber%20Intelligence/Data%20Connectors/CyfirmaCyberIntelligence_ccp/CyfirmaCyberIntel_DataConnectorDefinition.json","true" -"CyfirmaMalware_CL","Cyfirma Cyber Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Cyber%20Intelligence","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-cyber-intelligence","2025-05-15","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaCyberIntelligenceDC","Microsoft","CYFIRMA Cyber Intelligence","The CYFIRMA Cyber Intelligence data connector enables seamless log ingestion from the DeCYFIR API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Cyber Intelligence"", ""description"": ""This connector provides the Indicators, Threat actors, Malware and Campaigns logs from CYFIRMA Cyber Intelligence. The connector uses the DeCYFIR API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into a custom table during ingestion. 
This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""descriptionMarkdown"": ""This connector provides the Indicators, Threat actors, Malware and Campaigns logs from CYFIRMA Cyber Intelligence. The connector uses the DeCYFIR API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Pull all IoC's Or Tailored IoC's"", ""placeholder"": ""All IoC's or Tailored IoC's"", ""type"": ""text"", ""name"": ""apiAll"", ""defaultValue"": ""false"", ""description"": ""Set to true to pull all IoC's, set to false to pull tailoried IoC's""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""defaultValue"": ""false"", ""description"": ""Setting it to true returns only data added since the last API call, while false returns data from the last 24 hours.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Recommended Actions"", ""placeholder"": ""Recommended Action can be any one of:All/Monitor/Block"", ""type"": ""text"", ""name"": ""recommendedActions"", ""defaultValue"": ""All"", ""description"": ""Recommended Action can be any one of:All/Monitor/Block""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Threat Actor Associated"", ""placeholder"": ""Is any Threat Actor Associated 
with the IoC's"", ""type"": ""text"", ""name"": ""isThreatActorExists"", ""defaultValue"": ""false"", ""description"": ""Is any Threat Actor Associated with the IoC's""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Cyber%20Intelligence/Data%20Connectors/CyfirmaCyberIntelligence_ccp/CyfirmaCyberIntel_DataConnectorDefinition.json","true" -"CyfirmaThreatActors_CL","Cyfirma Cyber Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Cyber%20Intelligence","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-cyber-intelligence","2025-05-15","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaCyberIntelligenceDC","Microsoft","CYFIRMA Cyber Intelligence","The CYFIRMA Cyber Intelligence data connector enables seamless log ingestion from the DeCYFIR API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Cyber Intelligence"", ""description"": ""This connector provides the Indicators, Threat actors, Malware and Campaigns logs from CYFIRMA Cyber Intelligence. 
The connector uses the DeCYFIR API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""descriptionMarkdown"": ""This connector provides the Indicators, Threat actors, Malware and Campaigns logs from CYFIRMA Cyber Intelligence. The connector uses the DeCYFIR API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Pull all IoC's Or Tailored IoC's"", ""placeholder"": ""All IoC's or Tailored IoC's"", ""type"": ""text"", ""name"": ""apiAll"", ""defaultValue"": ""false"", ""description"": ""Set to true to pull all IoC's, set to false to pull tailored IoC's""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""defaultValue"": ""false"", ""description"": ""Setting it to true returns only data added since the last API call, while false returns data from the last 24 hours.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Recommended Actions"", ""placeholder"": ""Recommended Action can be any one of:All/Monitor/Block"", ""type"": ""text"", ""name"": ""recommendedActions"", ""defaultValue"": ""All"", ""description"": ""Recommended Action can be 
any one of:All/Monitor/Block""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Threat Actor Associated"", ""placeholder"": ""Is any Threat Actor Associated with the IoC's"", ""type"": ""text"", ""name"": ""isThreatActorExists"", ""defaultValue"": ""false"", ""description"": ""Is any Threat Actor Associated with the IoC's""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Cyber%20Intelligence/Data%20Connectors/CyfirmaCyberIntelligence_ccp/CyfirmaCyberIntel_DataConnectorDefinition.json","true" -"","Cyfirma Digital Risk","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-digital-risk","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","","","","","","","false","","false" -"CyfirmaDBWMDarkWebAlerts_CL","Cyfirma Digital Risk","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-digital-risk","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaDigitalRiskAlertsConnector","Microsoft","CYFIRMA Digital Risk","The CYFIRMA Digital Risk Alerts data connector enables seamless log ingestion from the DeCYFIR/DeTCT API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. 
Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Digital Risk"", ""description"": ""Connect to CYFIRMA Digital Risk Alerts to ingest logs into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT API to retrieve alerts and supports DCR-based ingestion time transformations for efficient log parsing."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. 
False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk/Data%20Connectors/CyfirmaDigitalRiskAlerts_ccp/CyfirmaDigitalRiskAlerts_DataConnectorDefinition.json","true" -"CyfirmaDBWMPhishingAlerts_CL","Cyfirma Digital Risk","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-digital-risk","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaDigitalRiskAlertsConnector","Microsoft","CYFIRMA Digital Risk","The CYFIRMA Digital Risk Alerts data connector enables seamless log ingestion from the DeCYFIR/DeTCT API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Digital Risk"", ""description"": ""Connect to CYFIRMA Digital Risk Alerts to ingest logs into Microsoft Sentinel. 
This connector uses the DeCYFIR/DeTCT API to retrieve alerts and supports DCR-based ingestion time transformations for efficient log parsing."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk/Data%20Connectors/CyfirmaDigitalRiskAlerts_ccp/CyfirmaDigitalRiskAlerts_DataConnectorDefinition.json","true" -"CyfirmaDBWMRansomwareAlerts_CL","Cyfirma Digital Risk","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-digital-risk","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaDigitalRiskAlertsConnector","Microsoft","CYFIRMA Digital Risk","The CYFIRMA Digital Risk Alerts data connector enables seamless log ingestion from the 
DeCYFIR/DeTCT API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Digital Risk"", ""description"": ""Connect to CYFIRMA Digital Risk Alerts to ingest logs into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT API to retrieve alerts and supports DCR-based ingestion time transformations for efficient log parsing."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. 
False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk/Data%20Connectors/CyfirmaDigitalRiskAlerts_ccp/CyfirmaDigitalRiskAlerts_DataConnectorDefinition.json","true" -"CyfirmaSPEConfidentialFilesAlerts_CL","Cyfirma Digital Risk","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-digital-risk","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaDigitalRiskAlertsConnector","Microsoft","CYFIRMA Digital Risk","The CYFIRMA Digital Risk Alerts data connector enables seamless log ingestion from the DeCYFIR/DeTCT API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Digital Risk"", ""description"": ""Connect to CYFIRMA Digital Risk Alerts to ingest logs into Microsoft Sentinel. 
This connector uses the DeCYFIR/DeTCT API to retrieve alerts and supports DCR-based ingestion time transformations for efficient log parsing."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk/Data%20Connectors/CyfirmaDigitalRiskAlerts_ccp/CyfirmaDigitalRiskAlerts_DataConnectorDefinition.json","true" -"CyfirmaSPEPIIAndCIIAlerts_CL","Cyfirma Digital Risk","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-digital-risk","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaDigitalRiskAlertsConnector","Microsoft","CYFIRMA Digital Risk","The CYFIRMA Digital Risk Alerts data connector enables seamless log ingestion from the 
DeCYFIR/DeTCT API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Digital Risk"", ""description"": ""Connect to CYFIRMA Digital Risk Alerts to ingest logs into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT API to retrieve alerts and supports DCR-based ingestion time transformations for efficient log parsing."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. 
False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk/Data%20Connectors/CyfirmaDigitalRiskAlerts_ccp/CyfirmaDigitalRiskAlerts_DataConnectorDefinition.json","true" -"CyfirmaSPESocialThreatAlerts_CL","Cyfirma Digital Risk","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-digital-risk","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaDigitalRiskAlertsConnector","Microsoft","CYFIRMA Digital Risk","The CYFIRMA Digital Risk Alerts data connector enables seamless log ingestion from the DeCYFIR/DeTCT API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Digital Risk"", ""description"": ""Connect to CYFIRMA Digital Risk Alerts to ingest logs into Microsoft Sentinel. 
This connector uses the DeCYFIR/DeTCT API to retrieve alerts and supports DCR-based ingestion time transformations for efficient log parsing."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk/Data%20Connectors/CyfirmaDigitalRiskAlerts_ccp/CyfirmaDigitalRiskAlerts_DataConnectorDefinition.json","true" -"CyfirmaSPESourceCodeAlerts_CL","Cyfirma Digital Risk","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-digital-risk","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaDigitalRiskAlertsConnector","Microsoft","CYFIRMA Digital Risk","The CYFIRMA Digital Risk Alerts data connector enables seamless log ingestion from the 
DeCYFIR/DeTCT API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Digital Risk"", ""description"": ""Connect to CYFIRMA Digital Risk Alerts to ingest logs into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT API to retrieve alerts and supports DCR-based ingestion time transformations for efficient log parsing."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. 
False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk/Data%20Connectors/CyfirmaDigitalRiskAlerts_ccp/CyfirmaDigitalRiskAlerts_DataConnectorDefinition.json","true" -"","Cyfirma Vulnerabilities Intel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Vulnerabilities%20Intel","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-vulnerabilities","2025-05-15","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","","","","","","","false","","false" -"CyfirmaVulnerabilities_CL","Cyfirma Vulnerabilities Intel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Vulnerabilities%20Intel","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-vulnerabilities","2025-05-15","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaVulnerabilitiesIntelDC","Microsoft","CYFIRMA Vulnerabilities Intelligence","The CYFIRMA Vulnerabilities Intelligence data connector enables seamless log ingestion from the DeCYFIR API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the CYFIRMA API's to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. 
This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Vulnerabilities Intelligence"", ""description"": ""This connector provides the Vulnerabilities logs from CYFIRMA Vulnerabilities Intelligence. The connector uses the DeCYFIR API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""descriptionMarkdown"": ""This connector provides the Vulnerabilities logs from CYFIRMA Vulnerabilities Intelligence. The connector uses the DeCYFIR API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""defaultValue"": ""false"", ""description"": ""API Delta: If true (default), returns data since the last call; if false or unspecified, returns data from the last 24 hours.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Vendor-Associated Vulnerabilities"", ""placeholder"": """", ""type"": ""text"", ""name"": ""isVendor"", ""defaultValue"": ""false"", ""description"": ""The value for Vendor-Associated Vulnerabilities can be either true or false.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": 
""Product-Associated Vulnerabilities"", ""placeholder"": """", ""type"": ""text"", ""name"": ""isProduct"", ""defaultValue"": ""false"", ""description"": ""The value for Product-Associated Vulnerabilities can be either true or false.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Product with Version-Associated Vulnerabilities"", ""placeholder"": """", ""type"": ""text"", ""name"": ""isVersion"", ""defaultValue"": ""false"", ""description"": ""The value for Version-Associated Vulnerabilities can be either true or false.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Vulnerabilities%20Intel/Data%20Connectors/CyfirmaVulnerabilitiesIntel_ccp/CyfirmaVulnerabilities_DataConnectorDefinition.json","true" -"","Cynerio","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cynerio","cynerio1681887657820","cynerio-medical-device-security-sentinel-connector","2023-03-29","2023-03-29","","Cynerio","Partner","https://cynerio.com","","domains","","","","","","","false","","false" -"CynerioEvent_CL","Cynerio","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cynerio","cynerio1681887657820","cynerio-medical-device-security-sentinel-connector","2023-03-29","2023-03-29","","Cynerio","Partner","https://cynerio.com","","domains","CynerioSecurityEvents","Cynerio","Cynerio Security Events","The [Cynerio](https://www.cynerio.com/) connector allows you to easily connect your Cynerio Security Events with Microsoft Sentinel, to view IDS Events. 
This gives you more insight into your organization network security posture and improves your security operation capabilities. ","[{""title"": ""Configure and connect Cynerio"", ""description"": ""Cynerio can integrate with and export events directly to Microsoft Sentinel via Azure Server. Follow these steps to establish integration:\n\n1. In the Cynerio console, go to Settings > Integrations tab (default), and click on the **+Add Integration** button at the top right.\n\n2. Scroll down to the **SIEM** section.\n\n3. On the Microsoft Sentinel card, click the Connect button.\n\n4. The Integration Details window opens. Use the parameters below to fill out the form and set up the connection."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cynerio/Data%20Connectors/Cynerio_Connector.json","true" -"","Cyware","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyware","cywarelabsinc1709256751930","microsoft-sentinel-solution-cyware","2024-03-18","2024-03-18","","Cyware","Partner","","","domains","","","","","","","false","","false" -"","DEV-0537DetectionandHunting","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/DEV-0537DetectionandHunting","azuresentinel","azure-sentinel-solution-DEV-0537DetectionandHunting","2022-04-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","DNS Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/DNS%20Essentials","azuresentinel","azure-sentinel-solution-dns-domain","2023-01-14","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","DORA Compliance","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/DORA%20Compliance","azuresentinel","azure-sentinel-solution-doracompliance","2025-10-08","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"","Darktrace","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Darktrace","darktrace1655286944672","darktrace_for_sentinel","2022-05-02","","","Darktrace","Partner","https://www.darktrace.com/en/contact/","","domains","","","","","","","false","","false" 
-"darktrace_model_alerts_CL","Darktrace","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Darktrace","darktrace1655286944672","darktrace_for_sentinel","2022-05-02","","","Darktrace","Partner","https://www.darktrace.com/en/contact/","","domains","DarktraceRESTConnector","Darktrace","Darktrace Connector for Microsoft Sentinel REST API","The Darktrace REST API connector pushes real-time events from Darktrace to Microsoft Sentinel and is designed to be used with the Darktrace Solution for Sentinel. The connector writes logs to a custom log table titled ""darktrace_model_alerts_CL""; Model Breaches, AI Analyst Incidents, System Alerts and Email Alerts can be ingested - additional filters can be set up on the Darktrace System Configuration page. Data is pushed to Sentinel from Darktrace masters.","[{""title"": """", ""description"": ""1. Detailed setup instructions can be found on the Darktrace Customer Portal: https://customerportal.darktrace.com/product-guides/main/microsoft-sentinel-introduction\n 2. Take note of the Workspace ID and the Primary key. You will need to enter these details on your Darktrace System Configuration page.\n "", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Darktrace Configuration"", ""description"": ""1. Perform the following steps on the Darktrace System Configuration page:\n 2. Navigate to the System Configuration Page (Main Menu > Admin > System Config)\n 3. Go into Modules configuration and click on the \""Microsoft Sentinel\"" configuration card\n 4. Select \""HTTPS (JSON)\"" and hit \""New\""\n 5. Fill in the required details and select appropriate filters\n 6. Click \""Verify Alert Settings\"" to attempt authentication and send out a test alert\n 7. 
Run a \""Look for Test Alerts\"" sample query to validate that the test alert has been received"", ""instructions"": """"}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Darktrace Prerequisites"", ""description"": ""To use this Data Connector a Darktrace master running v5.2+ is required.\n Data is sent to the [Azure Monitor HTTP Data Collector API](https://docs.microsoft.com/azure/azure-monitor/logs/data-collector-api) over HTTPs from Darktrace masters, therefore outbound connectivity from the Darktrace master to Microsoft Sentinel REST API is required.""}, {""name"": ""Filter Darktrace Data"", ""description"": ""During configuration it is possible to set up additional filtering on the Darktrace System Configuration page to constrain the amount or types of data sent.""}, {""name"": ""Try the Darktrace Sentinel Solution"", ""description"": ""You can get the most out of this connector by installing the Darktrace Solution for Microsoft Sentinel. 
This will provide workbooks to visualise alert data and analytics rules to automatically create alerts and incidents from Darktrace Model Breaches and AI Analyst incidents.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Darktrace/Data%20Connectors/DarktraceConnectorRESTAPI.json","true" -"","Datalake2Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Datalake2Sentinel","cert_orange_cyberdefense","microsoft-sentinel-solution-datalake2sentinel","2024-01-15","2024-01-15","","Orange Cyberdefense","Partner","https://www.orangecyberdefense.com/global/contact","","domains,verticals","","","","","","","false","","false" -"ThreatIntelligenceIndicator","Datalake2Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Datalake2Sentinel","cert_orange_cyberdefense","microsoft-sentinel-solution-datalake2sentinel","2024-01-15","2024-01-15","","Orange Cyberdefense","Partner","https://www.orangecyberdefense.com/global/contact","","domains,verticals","Datalake2SentinelConnector","Orange Cyberdefense","Datalake2Sentinel","This solution installs the Datalake2Sentinel connector which is built using the Codeless Connector Platform and allows you to automatically ingest threat intelligence indicators from **Datalake Orange Cyberdefense's CTI platform** into Microsoft Sentinel via the Upload Indicators REST API. After installing the solution, configure and enable this data connector by following guidance in Manage solution view.","[{""title"": ""Installation and setup instructions"", ""description"": ""Use the documentation from this Github repository to install and configure the Datalake to Microsoft Sentinel connector. 
\n\nhttps://github.com/cert-orangecyberdefense/datalake2sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Datalake2Sentinel/Data%20Connectors/Datalake2SentinelConnector.json","true" -"","Dataminr Pulse","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dataminr%20Pulse","dataminrinc1648845584891","dataminr_sentinel","2023-04-12","2023-04-12","","Dataminr Support","Partner","https://www.dataminr.com/dataminr-support#support","","domains","","","","","","","false","","false" -"DataminrPulse_Alerts_CL","Dataminr Pulse","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dataminr%20Pulse","dataminrinc1648845584891","dataminr_sentinel","2023-04-12","2023-04-12","","Dataminr Support","Partner","https://www.dataminr.com/dataminr-support#support","","domains","DataminrPulseAlerts","Dataminr","Dataminr Pulse Alerts Data Connector","Dataminr Pulse Alerts Data Connector brings our AI-powered real-time intelligence into Microsoft Sentinel for faster threat detection and response.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the DataminrPulse in which logs are pushed via Dataminr RTAP and it will ingest logs into Microsoft Sentinel. Furthermore, the connector will fetch the ingested data from the custom logs table and create Threat Intelligence Indicators into Microsoft Sentinel Threat Intelligence. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1- Credentials for the Dataminr Pulse Client ID and Client Secret**\n\n * Obtain Dataminr Pulse user ID/password and API client ID/secret from your Dataminr Customer Success Manager (CSM).""}, {""title"": """", ""description"": ""**STEP 2- Configure Watchlists in Dataminr Pulse portal.**\n\n Follow the steps in this section to configure watchlists in portal:\n\n 1. **Login** to the Dataminr Pulse [website](https://app.dataminr.com).\n\n 2. Click on the settings gear icon, and select **Manage Lists**.\n\n 3. Select the type of Watchlist you want to create (Cyber, Topic, Company, etc.) and click the **New List** button.\n\n 4. Provide a **name** for your new Watchlist, and select a highlight color for it, or keep the default color.\n\n 5. When you are done configuring the Watchlist, click **Save** to save it.""}, {""title"": """", ""description"": ""**STEP 3 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. 
When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of DataminrPulse Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 4 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of DataminrPulse Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of DataminrPulse Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. 
Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Dataminr Pulse Microsoft Sentinel data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DataminrPulse connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-DataminrPulseAlerts-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-DataminrPulseAlerts-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\n\t a. **Function Name** \n\n\t b. **Location**: The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t c. **Workspace**: Enter Workspace ID of log analytics Workspace ID \n\n\t d. **Workspace Key**: Enter Primary Key of log analytics Workspace \n\n\t e. 
**DataminrBaseURL**: Enter Base URL starting with \""https://\"" followed by hostname (Example: https://gateway.dataminr.com/) \n\n\t f. **ClientId**: Enter your Dataminr account Client ID \n\n\t g. **ClientSecret**: Enter your Dataminr account Client Secret \n\n\t h. **AzureEntraObjectID**: Enter Object id of your Microsoft Entra App \n\n\t i. **AlertsTableName**: Enter name of the table used to store Dataminr Alerts logs. Default is 'DataminrPulse_Alerts' \n\n\t j. **AzureClientId**: Enter Azure Client ID that you have created during app registration \n\n\t k. **AzureClientSecret**: Enter Azure Client Secret that you have created during creating the client secret \n\n\t l. **AzureTenantId**: Enter Azure Tenant ID of your Azure Active Directory \n\n\t m. **AzureResourceGroupName**: Enter Azure Resource Group Name in which you want deploy the data connector \n\n\t n. **AzureWorkspaceName**: Enter Microsoft Sentinel Workspace Name of Log Analytics workspace \n\n\t o. **AzureSubscriptionId**: Enter Azure Subscription Id which is present in the subscription tab in Microsoft Sentinel \n\n\t p. **LogLevel**: Add log level or log severity value. Default is 'INFO' \n\n\t q. **Schedule**: Enter a valid Quartz Cron-Expression (Example: 0 0 0 * * *) \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Dataminr Pulse Microsoft Sentinel data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": ""1) Deploy a Function App"", ""description"": ""> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-DataminrPulseAlerts-functionapp) file. 
Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. DmPulseXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": ""2) Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\n\t a. **Function Name** \n\n\t b. **Location**: The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t c. 
**Workspace**: Enter Workspace ID of log analytics Workspace ID \n\n\t d. **Workspace Key**: Enter Primary Key of log analytics Workspace \n\n\t e. **DataminrBaseURL**: Enter Base URL starting with \""https://\"" followed by hostname (Example: https://gateway.dataminr.com/) \n\n\t f. **ClientId**: Enter your Dataminr account Client ID \n\n\t g. **ClientSecret**: Enter your Dataminr account Client Secret \n\n\t h. **AzureEntraObjectID**: Enter Object id of your Microsoft Entra App \n\n\t i. **AlertsTableName**: Enter name of the table used to store Dataminr Alerts logs. Default is 'DataminrPulse_Alerts' \n\n\t j. **AzureClientId**: Enter Azure Client ID that you have created during app registration \n\n\t k. **AzureClientSecret**: Enter Azure Client Secret that you have created during creating the client secret \n\n\t l. **AzureTenantId**: Enter Azure Tenant ID of your Azure Active Directory \n\n\t m. **AzureResourceGroupName**: Enter Azure Resource Group Name in which you want deploy the data connector \n\n\t n. **AzureWorkspaceName**: Enter Microsoft Sentinel Workspace Name of Log Analytics workspace \n\n\t o. **AzureSubscriptionId**: Enter Azure Subscription Id which is present in the subscription tab in Microsoft Sentinel \n\n\t p. **LogLevel**: Add log level or log severity value. Default is 'INFO' \n\n\t q. **Schedule**: Enter a valid Quartz Cron-Expression (Example: 0 0 0 * * *) \n\n\t r. **logAnalyticsUri** (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}, {""title"": """", ""description"": ""**STEP 7 - Post Deployment steps**\n\n""}, {""title"": ""1) Get the Function app endpoint"", ""description"": ""1. 
Go to Azure function Overview page and Click on **\""Functions\""** in the left blade.\n2. Click on the function called **\""DataminrPulseAlertsHttpStarter\""**.\n3. Go to **\""GetFunctionurl\""** and copy the function url.\n4. Replace **{functionname}** with **\""DataminrPulseAlertsSentinelOrchestrator\""** in copied function url.""}, {""title"": ""2) To add integration settings in Dataminr RTAP using the function URL"", ""description"": ""1. Open any API request tool like Postman.\n2. Click on '+' to create a new request.\n3. Select HTTP request method as **'POST'**.\n4. Enter the url prepared in **point 1)**, in the request URL part.\n5. In Body, select raw JSON and provide request body as below(case-sensitive): \n\t\t{ \n\t\t \""integration-settings\"": \""ADD\"", \n\t\t \""url\"": \""`(URL part from copied Function-url)`\"", \n\t\t \""token\"": \""`(value of code parameter from copied Function-url)`\"" \n\t\t}\n6. After providing all required details, click **Send**.\n7. You will receive an integration setting ID in the HTTP response with a status code of 200.\n8. Save **Integration ID** for future reference.""}, {""title"": """", ""description"": ""*Now we are done with the adding integration settings for Dataminr RTAP. Once the Dataminr RTAP send an alert data, Function app is triggered and you should be able to see the Alerts data from the Dataminr Pulse into LogAnalytics workspace table called \""DataminrPulse_Alerts_CL\"".*\n\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Required Dataminr Credentials/permissions"", ""description"": ""\n\na. Users must have a valid Dataminr Pulse API **client ID** and **secret** to use this data connector.\n\n b. One or more Dataminr Pulse Watchlists must be configured in the Dataminr Pulse website.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dataminr%20Pulse/Data%20Connectors/DataminrPulseAlerts/DataminrPulseAlerts_FunctionApp.json","true" -"","Datawiza","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Datawiza","datawiza","datawiza-sentinel-solution-dap","2025-11-10","","","Datawiza Technology Inc.","Partner","https://www.datawiza.com/contact-us/","","domains","","","","","","","false","","false" -"datawizaserveraccess_CL","Datawiza","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Datawiza","datawiza","datawiza-sentinel-solution-dap","2025-11-10","","","Datawiza Technology Inc.","Partner","https://www.datawiza.com/contact-us/","","domains","DatawizaDapSolution","Datawiza","Datawiza DAP","Connects the Datawiza DAP logs to Azure Log Analytics via the REST API interface","[{""title"": ""Step 1 : Read the detailed documentation"", ""description"": ""The installation process is 
documented in great detail in the documentation site [Microsoft Sentinel integration](https://docs.datawiza.com/tutorial/integrate-with-microsoft-sentinel.html). The user should consult our support (support@datawiza.com) further to understand installation and debug of the integration.""}, {""title"": ""Step 2: Install the Datawiza Sentinel Connector"", ""description"": ""The next step is to install the Datawiza log forwarder to send logs to Microsoft Sentinel. The exact installation will depend on your environment, consult the [Microsoft Sentinel integration](https://docs.datawiza.com/tutorial/integrate-with-microsoft-sentinel.html) for full details. ""}, {""title"": ""Step 3: Test the data ingestion"", ""description"": ""After approximately 20 minutes access the Log Analytics workspace on your Microsoft Sentinel installation, and locate the *Custom Logs* section verify that a *datawizaserveraccess_CL* table exists. Use the sample queries to examine the data.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Datawiza/Data%20Connectors/Datawiza_DAP.json","true" -"","Delinea Secret Server","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Delinea%20Secret%20Server","delineainc1653506022260","delinea_secret_server_mss","2022-05-06","","","Delinea","Partner","https://delinea.com/support/","","domains","","","","","","","false","","false" -"CommonSecurityLog","Delinea Secret Server","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Delinea%20Secret%20Server","delineainc1653506022260","delinea_secret_server_mss","2022-05-06","","","Delinea","Partner","https://delinea.com/support/","","domains","DelineaSecretServerAma","Delinea, Inc","[Deprecated] Delinea Secret Server via AMA","Common Event Format (CEF) from Delinea Secret Server ","[{""title"": """", ""description"": """", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine""}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Delinea%20Secret%20Server/Data%20Connectors/template_DelineaSecretServerAMA.json","true" -"CommonSecurityLog","Delinea Secret Server","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Delinea%20Secret%20Server","delineainc1653506022260","delinea_secret_server_mss","2022-05-06","","","Delinea","Partner","https://delinea.com/support/","","domains","DelineaSecretServer_CEF","Delinea, Inc","[Deprecated] Delinea Secret Server via Legacy Agent","Common Event Format (CEF) from Delinea Secret Server ","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. 
Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Delinea Secret Server"", ""description"": ""must be configured to export logs via Syslog \n\n [Learn more about configure Secret Server](https://thy.center/ss/link/syslog)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Delinea%20Secret%20Server/Data%20Connectors/DelineaSecretServer_CEF.json","true" -"","Dev 0270 Detection and Hunting","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dev%200270%20Detection%20and%20Hunting","azuresentinel","azure-sentinel-solution-dev0270detectionandhunting","2022-11-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","Digital Guardian Data Loss Prevention","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Digital%20Guardian%20Data%20Loss%20Prevention","azuresentinel","azure-sentinel-solution-digitalguardiandlp","2021-07-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" 
-"Syslog","Digital Guardian Data Loss Prevention","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Digital%20Guardian%20Data%20Loss%20Prevention","azuresentinel","azure-sentinel-solution-digitalguardiandlp","2021-07-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","DigitalGuardianDLP","Digital Guardian","[Deprecated] Digital Guardian Data Loss Prevention","[Digital Guardian Data Loss Prevention (DLP)](https://digitalguardian.com/platform-overview) data connector provides the capability to ingest Digital Guardian DLP logs into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**DigitalGuardianDLPEvent**](https://aka.ms/sentinel-DigitalGuardianDLP-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Configure Digital Guardian to forward logs via Syslog to remote server where you will install the agent."", ""description"": ""Follow these steps to configure Digital Guardian to forward logs via Syslog:\n\n1.1. Log in to the Digital Guardian Management Console.\n\n1.2. Select **Workspace** > **Data Export** > **Create Export**.\n\n1.3. From the **Data Sources** list, select **Alerts** or **Events** as the data source.\n\n1.4. From the **Export type** list, select **Syslog**.\n\n1.5. From the **Type list**, select **UDP** or **TCP** as the transport protocol.\n\n1.6. In the **Server** field, type the IP address of your Remote Syslog server.\n\n1.7. In the **Port** field, type 514 (or other port if your Syslog server was configured to use non-default port).\n\n1.8. From the **Severity Level** list, select a severity level.\n\n1.9. Select the **Is Active** check box.\n\n1.9. Click **Next**.\n\n1.10. From the list of available fields, add Alert or Event fields for your data export.\n\n1.11. 
Select a Criteria for the fields in your data export and click **Next**.\n\n1.12. Select a group for the criteria and click **Next**.\n\n1.13. Click **Test Query**.\n\n1.14. Click **Next**.\n\n1.15. Save the data export.""}, {""title"": ""2. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server to which the logs will be forwarded.\n\n> Logs on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""3. 
Check logs in Microsoft Sentinel"", ""description"": ""Open Log Analytics to check if the logs are received using the Syslog schema.\n\n>**NOTE:** It may take up to 15 minutes before new logs will appear in Syslog table."", ""instructions"": []}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Digital%20Guardian%20Data%20Loss%20Prevention/Data%20Connectors/Connector_DigitalGuardian_Syslog.json","true" -"","Digital Shadows","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Digital%20Shadows","digitalshadows1662022995707","digitalshadows_searchlight_for_sentinel","","","","Digital Shadows","Partner","https://www.digitalshadows.com/","","domains","","","","","","","false","","false" -"DigitalShadows_CL","Digital Shadows","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Digital%20Shadows","digitalshadows1662022995707","digitalshadows_searchlight_for_sentinel","","","","Digital Shadows","Partner","https://www.digitalshadows.com/","","domains","DigitalShadowsSearchlightAzureFunctions","Digital Shadows","Digital Shadows Searchlight","The Digital Shadows data connector provides ingestion of the incidents and alerts from Digital Shadows Searchlight into the Microsoft Sentinel using the REST API. 
The connector will provide the incidents and alerts information such that it helps to examine, diagnose and analyse the potential security risks and threats.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a 'Digital Shadows Searchlight' to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the 'Digital Shadows Searchlight' API**\n\nThe provider should provide or link to detailed steps to configure the 'Digital Shadows Searchlight' API endpoint so that the Azure Function can authenticate to it successfully, get its authorization key or token, and pull the appliance's logs into Microsoft Sentinel.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the 'Digital Shadows Searchlight' connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the 'Digital Shadows Searchlight' API authorization key(s) or Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""**Option 1 - Azure 
Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the 'Digital Shadows Searchlight' connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-Digitalshadows-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password**, 'and/or Other required fields'. \n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": """", ""description"": ""**Option 2 - Manual Deployment of Azure Functions**\n\n Use the following step-by-step instructions to deploy the 'Digital Shadows Searchlight' connector manually with Azure Functions.""}, {""title"": ""1. Create a Function App"", ""description"": ""1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, ensure Runtime stack is set to **python 3.11**. \n4. In the **Hosting** tab, ensure **Plan type** is set to **'Consumption (Serverless)'**.\n5.select Storage account\n6. 'Add other required configurations'. \n5. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""2. Import Function App Code(Zip deployment)"", ""description"": ""1. Install Azure CLI\n2. From terminal type **az functionapp deployment source config-zip -g -n --src ** and hit enter. 
Set the `ResourceGroup` value to: your resource group name. Set the `FunctionApp` value to: your newly created function app name. Set the `Zip File` value to: `digitalshadowsConnector.zip`(path to your zip file). Note:- Download the zip file from the link - [Function App Code](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Digital%20Shadows/Data%20Connectors/Digital%20Shadows/digitalshadowsConnector.zip)""}, {""title"": ""3. Configure the Function App"", ""description"": ""1. In the Function App screen, click the Function App name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following 'x (number of)' application settings individually, under Name, with their respective string values (case-sensitive) under Value: \n\t\tDigitalShadowsAccountID\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tDigitalShadowsKey\n\t\tDigitalShadowsSecret\n\t\tHistoricalDays\n\t\tDigitalShadowsURL\n\t\tClassificationFilterOperation\n\t\tHighVariabilityClassifications\n\t\tFUNCTION_NAME\n\t\tlogAnalyticsUri (optional)\n(add any other settings required by the Function App)\nSet the `DigitalShadowsURL` value to: `https://api.searchlight.app/v1`\nSet the `HighVariabilityClassifications` value to: `exposed-credential,marked-document`\nSet the `ClassificationFilterOperation` value to: `exclude` for exclude function app or `include` for include function app \n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Azure Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. 
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: https://.ods.opinsights.azure.us. \n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Digital Shadows account ID, secret and key** is required. 
See the documentation to learn more about API on the `https://portal-digitalshadows.com/learn/searchlight-api/overview/description`.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Digital%20Shadows/Data%20Connectors/Digital%20Shadows/DigitalShadowsSearchlight_API_functionApp.json","true" -"","DomainTools","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/DomainTools","domaintoolsllc1647901527537","domaintools-iris-investigate","2022-10-20","","","DomainTools","Partner","https://www.domaintools.com/support/","","domains","","","","","","","false","","false" -"","Doppel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Doppel","doppel","azure-sentinel-solution-doppel","2024-11-20","","","Doppel","Partner","https://www.doppel.com/request-a-demo","","domains","","","","","","","false","","false" -"DoppelTable_CL","Doppel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Doppel","doppel","azure-sentinel-solution-doppel","2024-11-20","","","Doppel","Partner","https://www.doppel.com/request-a-demo","","domains","Doppel_DataConnector","Doppel","Doppel Data Connector","The data connector is built on Microsoft Sentinel for Doppel events and alerts and supports DCR-based [ingestion time transformations](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/ingestion-time-transformations) that parses the received security event data into a custom columns so that queries don't need to parse it again, thus resulting in better performance.","[{""title"": ""Configure Doppel Webhook"", ""description"": ""Configure the Webhook in Doppel and Endpoint with permissions in Microsoft Sentinel to send data."", ""instructions"": [{""type"": ""InstructionStepsGroup"", ""parameters"": {""enable"": true, ""userRequestPlaceHolder"": """", ""instructionSteps"": [{""title"": ""Register the Application in Microsoft Entra ID"", ""description"": ""1. 
**Open the [Microsoft Entra ID page](https://entra.microsoft.com/)**:\n - Click the provided link to open the **Microsoft Entra ID** registration page in a new tab.\n - Ensure you are logged in with an account that has **Admin level** permissions.\n\n2. **Create a New Application**:\n - In the **Microsoft Entra ID portal**, select **App registrations** mentioned on the left-hand side tab.\n - Click on **+ New registration**.\n - Fill out the following fields:\n - **Name**: Enter a name for the app (e.g., \u201cDoppel App\u201d).\n - **Supported account types**: Choose **Accounts in this organizational directory only** (Default Directory only - Single tenant).\n - **Redirect URI**: Leave this blank unless required otherwise.\n - Click **Register** to create the application.\n\n3. **Copy Application and Tenant IDs**:\n - Once the app is registered, note the **Application (client) ID** and **Directory (tenant) ID** from the **Overview** page. You\u2019ll need these for the integration.\n\n4. **Create a Client Secret**:\n - In the **Certificates & secrets** section, click **+ New client secret**.\n - Add a description (e.g., 'Doppel Secret') and set an expiration (e.g., 1 year).\n - Click **Add**.\n - **Copy the client secret value immediately**, as it will not be shown again.""}, {""title"": ""Assign the \""Monitoring Metrics Publisher\"" Role to the App"", ""description"": ""1. **Open the Resource Group in Azure Portal**:\n - Navigate to the **Resource Group** that contains the **Log Analytics Workspace** and **Data Collection Rules (DCRs)** where you want the app to push data.\n\n2. 
**Assign the Role**:\n - In the **Resource Group** menu, click on **Access control (IAM)** mentioned on the left-hand side tab ..\n - Click on **+ Add** and select **Add role assignment**.\n - In the **Role** dropdown, search for and select the **Monitoring Metrics Publisher** role.\n - Under **Assign access to**, choose **Azure AD user, group, or service principal**.\n - In the **Select** field, search for your registered app by **name** or **client ID**.\n - Click **Save** to assign the role to the application.""}, {""title"": ""Deploy the ARM Template"", ""description"": ""1. **Retrieve the Workspace ID**:\n - After assigning the role, you will need the **Workspace ID**.\n - Navigate to the **Log Analytics Workspace** within the **Resource Group**.\n - In the **Overview** section, locate the **Workspace ID** field under **Workspace details**.\n - **Copy the Workspace ID** and keep it handy for the next steps.\n\n2. **Click the Deploy to Azure Button**:\n - [![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fmetron-labs%2FAzure-Sentinel%2Frefs%2Fheads%2FDoppelSolution%2FSolutions%2FDoppel%2FData%2520Connectors%2FDeployToAzure.json).\n - This will take you directly to the Azure portal to start the deployment.\n\n3. **Review and Customize Parameters**:\n - On the custom deployment page, ensure you\u2019re deploying to the correct **subscription** and **resource group**.\n - Fill in the parameters like **workspace name**, **workspace ID**, and **workspace location**.\n\n4. **Click Review + Create** and then **Create** to deploy the resources.""}, {""title"": ""Verify DCE, DCR, and Log Analytics Table Setup"", ""description"": ""1. 
**Check the Data Collection Endpoint (DCE)**:\n - After deploying, go to **Azure Portal > Data Collection Endpoints**.\n - Verify that the **DoppelDCE** endpoint has been created successfully.\n - **Copy the DCE Logs Ingestion URI**, as you\u2019ll need this for generating the webhook URL.\n\n2. **Confirm Data Collection Rule (DCR) Setup**:\n - Go to **Azure Portal > Data Collection Rules**.\n - Ensure the **DoppelDCR** rule is present.\n - **Copy the Immutable ID** of the DCR from the Overview page, as you\u2019ll need it for the webhook URL.\n\n3. **Validate Log Analytics Table**:\n - Navigate to your **Log Analytics Workspace** (linked to Microsoft Sentinel).\n - Under the **Tables** section, verify that the **DoppelTable_CL** table has been created successfully and is ready to receive data.""}, {""title"": ""Integrate Doppel Alerts with Microsoft Sentinel"", ""description"": ""1. **Gather Necessary Information**:\n - Collect the following details required for integration:\n - **Data Collection Endpoint ID (DCE-ID)**\n - **Data Collection Rule ID (DCR-ID)**\n - **Microsoft Entra Credentials**: Tenant ID, Client ID, and Client Secret.\n\n2. **Coordinate with Doppel Support**:\n - Share the collected DCE-ID, DCR-ID, and Microsoft Entra credentials with Doppel support.\n - Request assistance to configure these details in the Doppel tenant to enable webhook setup.\n\n3. **Webhook Setup by Doppel**:\n - Doppel will use the provided Resource IDs and credentials to configure a webhook.\n - This webhook will facilitate the forwarding of alerts from Doppel to Microsoft Sentinel.\n\n4. 
**Verify Alert Delivery in Microsoft Sentinel**:\n - Check that alerts from Doppel are successfully forwarded to Microsoft Sentinel.\n - Validate that the **Workbook** in Microsoft Sentinel is updated with the alert statistics, ensuring seamless data integration.""}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": false}}], ""customs"": [{""name"": ""Microsoft Entra Tenant ID, Client ID and Client Secret"", ""description"": ""Microsoft Entra ID requires a Client ID and Client Secret to authenticate your application. 
Additionally, Global Admin/Owner level access is required to assign the Entra-registered application a Resource Group Monitoring Metrics Publisher role.""}, {""name"": ""Requires Workspace ID, DCE-URI, DCR-ID"", ""description"": ""You will need to get the Log Analytics Workspace ID, DCE Logs Ingestion URI and DCR Immutable ID for the configuration.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Doppel/Data%20Connectors/Template_Doppel.json","true" -"","Dragos","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dragos","dragosinc1734451815609","microsoft-sentinel-solution-dragos","2025-01-23","2025-01-23","","Dragos Inc","Partner","https://www.dragos.com","","domains","","","","","","","false","","false" -"DragosAlerts_CL","Dragos","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dragos","dragosinc1734451815609","microsoft-sentinel-solution-dragos","2025-01-23","2025-01-23","","Dragos Inc","Partner","https://www.dragos.com","","domains","DragosSitestoreCCP","Dragos"," Dragos Notifications via Cloud Sitestore","The [Dragos Platform](https://www.dragos.com/) is the leading Industrial Cyber Security platform it offers a comprehensive Operational Technology (OT) cyber threat detection built by unrivaled industrial cybersecurity expertise. 
This solution enables Dragos Platform notification data to be viewed in Microsoft Sentinel so that security analysts are able to triage potential cyber security events occurring in their industrial environments.","[{""description"": ""Please provide the following information to allow Microsoft Sentinel to connect to your Dragos Sitestore."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Dragos Sitestore Hostname"", ""placeholder"": ""dragossitestore.example.com"", ""type"": ""text"", ""name"": ""dragosSitestoreHostname""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Dragos Sitestore API Key ID"", ""placeholder"": ""Enter the API key ID."", ""type"": ""text"", ""name"": ""username""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Dragos Sitestore API Key Secret"", ""placeholder"": ""Enter the API key secret"", ""type"": ""password"", ""name"": ""password""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Minimum Notification Severity. Valid values are 0-5 inclusive. Ensure less than or equal to maximum severity."", ""placeholder"": ""Enter the min severity (recommend 0 for all notifications)"", ""type"": ""number"", ""name"": ""minSeverity""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Maximum Notification Severity. Valid values are 0-5 inclusive. 
Ensure greater than or equal to minimum severity."", ""placeholder"": ""Enter the max severity (recommend 5 for all notifications)"", ""type"": ""number"", ""name"": ""maxSeverity""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect to Sitestore"", ""disconnectLabel"": ""Disconnect from Sitestore"", ""name"": ""connectionToggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Dragos Sitestore API access"", ""description"": ""A Sitestore user account that has the `notification:read` permission. This account also needs to have an API key that can be provided to Sentinel.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dragos/Data%20Connectors/DragosSiteStore_CCP/dragosSitestoreDataConnectorDefinition.json","true" -"","DruvaDataSecurityCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/DruvaDataSecurityCloud","druva-azuresentinel-solution","azure-sentinel-solution-druva","2024-12-24","","","Druva Inc","Partner","https://support.druva.com/","","domains","","","","","","","false","","false" -"DruvaInsyncEvents_CL","DruvaDataSecurityCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/DruvaDataSecurityCloud","druva-azuresentinel-solution","azure-sentinel-solution-druva","2024-12-24","","","Druva Inc","Partner","https://support.druva.com/","","domains","DruvaEventCCPDefinition","Microsoft","Druva Events Connector","Provides capability to ingest the Druva events from Druva APIs","[{""description"": "">Note: Configurations to connect to Druva Rest API\n""}, {""description"": ""Step 1: Create credentials from Druva console. 
Refer this doc for steps:- https://help.druva.com/en/articles/8580838-create-and-manage-api-credentials\n""}, {""description"": ""Step 2: Enter the hostname. For public cloud its apis.druva.com\n""}, {""description"": ""Step 3: Enter client id and client secret key\n""}, {""description"": ""Provide required values:\n"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Hostname"", ""placeholder"": ""Example: apis.druva.com"", ""type"": ""text"", ""name"": ""hostname""}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}], ""title"": ""Connect to Druva API to start collecting logs in Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permission are required"", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Druva API Access"", ""description"": ""Druva API requires a client id and client secret to authenticate""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/DruvaDataSecurityCloud/Data%20Connectors/Druva_ccp/Druva_DataConnectorDefinition.json","true" -"DruvaPlatformEvents_CL","DruvaDataSecurityCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/DruvaDataSecurityCloud","druva-azuresentinel-solution","azure-sentinel-solution-druva","2024-12-24","","","Druva Inc","Partner","https://support.druva.com/","","domains","DruvaEventCCPDefinition","Microsoft","Druva Events Connector","Provides capability to ingest the Druva events from Druva APIs","[{""description"": "">Note: Configurations to connect to Druva Rest API\n""}, {""description"": ""Step 1: Create credentials from Druva console. 
Refer this doc for steps:- https://help.druva.com/en/articles/8580838-create-and-manage-api-credentials\n""}, {""description"": ""Step 2: Enter the hostname. For public cloud its apis.druva.com\n""}, {""description"": ""Step 3: Enter client id and client secret key\n""}, {""description"": ""Provide required values:\n"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Hostname"", ""placeholder"": ""Example: apis.druva.com"", ""type"": ""text"", ""name"": ""hostname""}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}], ""title"": ""Connect to Druva API to start collecting logs in Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permission are required"", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Druva API Access"", ""description"": ""Druva API requires a client id and client secret to authenticate""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/DruvaDataSecurityCloud/Data%20Connectors/Druva_ccp/Druva_DataConnectorDefinition.json","true" -"DruvaSecurityEvents_CL","DruvaDataSecurityCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/DruvaDataSecurityCloud","druva-azuresentinel-solution","azure-sentinel-solution-druva","2024-12-24","","","Druva Inc","Partner","https://support.druva.com/","","domains","DruvaEventCCPDefinition","Microsoft","Druva Events Connector","Provides capability to ingest the Druva events from Druva APIs","[{""description"": "">Note: Configurations to connect to Druva Rest API\n""}, {""description"": ""Step 1: Create credentials from Druva console. 
Refer this doc for steps:- https://help.druva.com/en/articles/8580838-create-and-manage-api-credentials\n""}, {""description"": ""Step 2: Enter the hostname. For public cloud its apis.druva.com\n""}, {""description"": ""Step 3: Enter client id and client secret key\n""}, {""description"": ""Provide required values:\n"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Hostname"", ""placeholder"": ""Example: apis.druva.com"", ""type"": ""text"", ""name"": ""hostname""}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}], ""title"": ""Connect to Druva API to start collecting logs in Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permission are required"", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Druva API Access"", ""description"": ""Druva API requires a client id and client secret to authenticate""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/DruvaDataSecurityCloud/Data%20Connectors/Druva_ccp/Druva_DataConnectorDefinition.json","true" -"","Dynamics 365","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynamics%20365","sentinel4dynamics365","dynamics365connector","2023-01-17","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"Dynamics365Activity","Dynamics 365","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynamics%20365","sentinel4dynamics365","dynamics365connector","2023-01-17","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Dynamics365","Microsoft","Dynamics 365","The Dynamics 365 
Common Data Service (CDS) activities connector provides insight into admin, user, and support activities, as well as Microsoft Social Engagement logging events. By connecting Dynamics 365 CRM logs into Microsoft Sentinel, you can view this data in workbooks, use it to create custom alerts, and improve your investigation process. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com//fwlink/p/?linkid=2226719&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""description"": ""Connect [Dynamics 365 CRM](https://aka.ms/Sentinel/Dynamics365) activity logs to your Microsoft Sentinel workspace."", ""instructions"": [{""parameters"": {""connectorKind"": ""Dynamics365"", ""title"": ""Dynamics365"", ""enable"": true}, ""type"": ""SentinelResourceProvider""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Tenant Permissions"", ""description"": ""'Security Administrator' or 'Global Administrator' on the workspace's tenant.""}, {""name"": ""License"", ""description"": ""[Microsoft Dynamics 365 production license](https://docs.microsoft.com/office365/servicedescriptions/microsoft-dynamics-365-online-service-description) (This connector is available for production environments only, not for sandbox). 
Also, a Microsoft 365 Enterprise [E3 or E5](https://docs.microsoft.com/power-platform/admin/enable-use-comprehensive-auditing#requirements) subscription is required for Activity Logging.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynamics%20365/Data%20Connectors/template_Dynamics365.json","true" -"","Dynatrace","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynatrace","dynatrace","dynatrace_azure_sentinel","2022-10-18","2023-10-16","","Dynatrace","Partner","https://www.dynatrace.com/services-support/","","domains","","","","","","","false","","false" -"DynatraceAttacks_CL","Dynatrace","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynatrace","dynatrace","dynatrace_azure_sentinel","2022-10-18","2023-10-16","","Dynatrace","Partner","https://www.dynatrace.com/services-support/","","domains","DynatraceAttacks","Dynatrace","Dynatrace Attacks","This connector uses the Dynatrace Attacks REST API to ingest detected attacks into Microsoft Sentinel Log Analytics","[{""title"": ""Dynatrace Attack Events to Microsoft Sentinel"", ""description"": ""Configure and Enable Dynatrace [Application Security](https://www.dynatrace.com/platform/application-security/). \n Follow [these instructions](https://docs.dynatrace.com/docs/shortlink/token#create-api-token) to generate an access token."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Dynatrace tenant (ex. xyz.dynatrace.com)"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{dynatraceEnvironmentUrl}}""}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Dynatrace tenant (ex. 
xyz.dynatrace.com)"", ""description"": ""You need a valid Dynatrace tenant with [Application Security](https://www.dynatrace.com/platform/application-security/) enabled, learn more about the [Dynatrace platform](https://www.dynatrace.com/).""}, {""name"": ""Dynatrace Access Token"", ""description"": ""You need a Dynatrace Access Token, the token should have ***Read attacks*** (attacks.read) scope.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynatrace/Data%20Connectors/Connector_Dynatrace_Attacks.json","true" -"DynatraceAuditLogs_CL","Dynatrace","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynatrace","dynatrace","dynatrace_azure_sentinel","2022-10-18","2023-10-16","","Dynatrace","Partner","https://www.dynatrace.com/services-support/","","domains","DynatraceAuditLogs","Dynatrace","Dynatrace Audit Logs","This connector uses the [Dynatrace Audit Logs REST API](https://docs.dynatrace.com/docs/dynatrace-api/environment-api/audit-logs) to ingest tenant audit logs into Microsoft Sentinel Log Analytics","[{""title"": ""Dynatrace Audit Log Events to Microsoft Sentinel"", ""description"": ""Enable Dynatrace Audit [Logging](https://docs.dynatrace.com/docs/shortlink/audit-logs#enable-audit-logging). \n Follow [these instructions](https://docs.dynatrace.com/docs/shortlink/token#create-api-token) to generate an access token."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Dynatrace tenant (ex. 
xyz.dynatrace.com)"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{dynatraceEnvironmentUrl}}""}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Dynatrace tenant (ex. xyz.dynatrace.com)"", ""description"": ""You need a valid Dynatrace Tenant, to learn more about the Dynatrace platform [Start your free trial](https://www.dynatrace.com/trial).""}, {""name"": ""Dynatrace Access Token"", ""description"": ""You need a Dynatrace Access Token, the token should have ***Read audit logs*** (auditLogs.read) scope.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynatrace/Data%20Connectors/Connector_Dynatrace_AuditLogs.json","true" -"DynatraceProblems_CL","Dynatrace","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynatrace","dynatrace","dynatrace_azure_sentinel","2022-10-18","2023-10-16","","Dynatrace","Partner","https://www.dynatrace.com/services-support/","","domains","DynatraceProblems","Dynatrace","Dynatrace Problems","This connector uses the [Dynatrace Problem REST API](https://docs.dynatrace.com/docs/dynatrace-api/environment-api/problems-v2) to ingest problem events into Microsoft Sentinel Log Analytics","[{""title"": ""Dynatrace Problem Events to Microsoft Sentinel"", ""description"": ""Follow [these instructions](https://docs.dynatrace.com/docs/shortlink/token#create-api-token) to generate an access token."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Dynatrace tenant (ex. 
xyz.dynatrace.com)"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{dynatraceEnvironmentUrl}}""}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Dynatrace tenant (ex. xyz.dynatrace.com)"", ""description"": ""You need a valid Dynatrace Tenant, to learn more about the Dynatrace platform [Start your free trial](https://www.dynatrace.com/trial).""}, {""name"": ""Dynatrace Access Token"", ""description"": ""You need a Dynatrace Access Token, the token should have ***Read problems*** (problems.read) scope.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynatrace/Data%20Connectors/Connector_Dynatrace_Problems.json","true" -"DynatraceSecurityProblems_CL","Dynatrace","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynatrace","dynatrace","dynatrace_azure_sentinel","2022-10-18","2023-10-16","","Dynatrace","Partner","https://www.dynatrace.com/services-support/","","domains","DynatraceRuntimeVulnerabilities","Dynatrace","Dynatrace Runtime Vulnerabilities","This connector uses the [Dynatrace Security Problem REST API](https://docs.dynatrace.com/docs/dynatrace-api/environment-api/application-security/vulnerabilities/get-vulnerabilities) to ingest detected runtime vulnerabilities into Microsoft Sentinel Log Analytics.","[{""title"": ""Dynatrace Vulnerabilities Events to Microsoft Sentinel"", ""description"": ""Configure and Enable Dynatrace [Application Security](https://www.dynatrace.com/platform/application-security/). 
\n Follow [these instructions](https://docs.dynatrace.com/docs/shortlink/token#create-api-token) to generate an access token."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Dynatrace tenant (ex. xyz.dynatrace.com)"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{dynatraceEnvironmentUrl}}""}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Dynatrace tenant (ex. xyz.dynatrace.com)"", ""description"": ""You need a valid Dynatrace tenant with [Application Security](https://www.dynatrace.com/platform/application-security/) enabled, learn more about the [Dynatrace platform](https://www.dynatrace.com/).""}, {""name"": ""Dynatrace Access Token"", ""description"": ""You need a Dynatrace Access Token, the token should have ***Read security problems*** (securityProblems.read) scope.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynatrace/Data%20Connectors/Connector_Dynatrace_RuntimeVulnerabilities.json","true" -"","ESET Inspect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESET%20Inspect","esetresearch1579795941720","eset_enterprise_inspector_mss","2022-06-01","","","ESET Enterprise","Partner","https://www.eset.com/int/business/solutions/endpoint-detection-and-response/","","domains","","","","","","","false","","false" -"ESETInspect_CL","ESET Inspect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESET%20Inspect","esetresearch1579795941720","eset_enterprise_inspector_mss","2022-06-01","","","ESET 
Enterprise","Partner","https://www.eset.com/int/business/solutions/endpoint-detection-and-response/","","domains","ESETInspect","ESET Netherlands","ESET Inspect","This connector will ingest detections from [ESET Inspect](https://www.eset.com/int/business/solutions/xdr-extended-detection-and-response/) using the provided [REST API](https://help.eset.com/ei_navigate/latest/en-US/api.html). This API is present in ESET Inspect version 1.4 and later.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to ESET Inspect to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Step 1 - Create an API user"", ""description"": ""1. Log into the ESET PROTECT console with an administrator account, select the **More** tab and the **Users** subtab. \n2. Click on the **ADD NEW** button and add a **native user**.\n3. Create a new user for the API account. **Optional:** Select a **Home group** other than **All** to limit what detections are ingested. \n4. Under the **Permission Sets** tab, assign the **Inspect reviewer permission set**.\n4. Log out of the administrator account and log into the console with the new API credentials for validation, then log out of the API account. 
\n5.""}, {""title"": ""Step 2 - Copy Workspace ID and Key"", ""description"": "">**IMPORTANT:** Before deploying the ESET Inspect connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Step 3 - Deploy the Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the ESET Inspect connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESETInspect-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password** , enter the **Inspect base URL** and the **first ID** to start ingesting detections from.\n - The defailt starting ID is **0**. This means that all detections will be ingested. \n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labelled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Access to the ESET PROTECT console"", ""description"": ""Permissions to add users""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESET%20Inspect/Data%20Connectors/ESETInspect_API_FunctionApp.json","true" -"","ESET Protect Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESET%20Protect%20Platform","eset","eset-protect-platform-solution","2024-10-29","2025-06-17","","ESET Enterprise Integrations","Partner","https://help.eset.com/eset_connect/en-US/integrations.html","","domains","","","","","","","false","","false" -"IntegrationTableIncidents_CL","ESET Protect Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESET%20Protect%20Platform","eset","eset-protect-platform-solution","2024-10-29","2025-06-17","","ESET Enterprise 
Integrations","Partner","https://help.eset.com/eset_connect/en-US/integrations.html","","domains","ESETProtectPlatform","ESET","ESET Protect Platform","The ESET Protect Platform data connector enables users to inject detections data from [ESET Protect Platform](https://www.eset.com/int/business/protect-platform/) using the provided [Integration REST API](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESET%20Protect%20Platform/Data%20Connectors). Integration REST API runs as scheduled Azure Function App.","[{""description"": "">**NOTE:** The ESET Protect Platform data connector uses Azure Functions to connect to the ESET Protect Platform via Eset Connect API to pull detections logs into Microsoft Sentinel. This process might result in additional data ingestion costs. See details on the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/).""}, {""description"": "">**NOTE:** The newest version of the ESET PROTECT Platform and Microsoft Sentinel integration pulls not only detections logs but also newly created incidents. If your integration was set up before 20.06.2025, please follow [these steps](https://help.eset.com/eset_connect/en-US/update_ms_sentinel_integration.html) to update it.""}, {""title"": ""Step 1 - Create an API user"", ""description"": ""Use this [instruction](https://help.eset.com/eset_connect/en-US/create_api_user_account.html) to create an ESET Connect API User account with **Login** and **Password**.""}, {""title"": ""Step 2 - Create a registered application"", ""description"": ""Create a Microsoft Entra ID registered application by following the steps in the [Register a new application instruction.](https://learn.microsoft.com/en-us/entra/identity-platform/quickstart-register-app)""}, {""title"": ""Step 3 - Deploy the ESET Protect Platform data connector using the Azure Resource Manager (ARM) template"", ""description"": ""\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-EsetProtectionPlatform-azuredeploy)\n\n2. Select the name of the **Log Analytics workspace** associated with your Microsoft Sentinel. Select the same **Resource Group** as the Resource Group of the Log Analytics workspace.\n\n3. Type the parameters of the registered application in Microsoft Entra ID: **Azure Client ID**, **Azure Client Secret**, **Azure Tenant ID**, **Object ID**. You can find the **Object ID** on Azure Portal by following this path \n> Microsoft Entra ID -> Manage (on the left-side menu) -> Enterprise applications -> Object ID column (the value next to your registered application name).\n\n4. Provide the ESET Connect API user account **Login** and **Password** obtained in **Step 1**.\n\n5. Select one or more ESET products (ESET PROTECT, ESET Inspect, ESET Cloud Office Security) from which detections are retrieved.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to register an application in Microsoft Entra ID"", ""description"": ""Sufficient permissions to register an application with your Microsoft Entra tenant are required.""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign the Monitoring Metrics Publisher role to the registered application in Microsoft Entra ID is required.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESET%20Protect%20Platform/Data%20Connectors/ESETProtectPlatform_API_FunctionApp.json","true" -"IntegrationTable_CL","ESET Protect Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESET%20Protect%20Platform","eset","eset-protect-platform-solution","2024-10-29","2025-06-17","","ESET Enterprise Integrations","Partner","https://help.eset.com/eset_connect/en-US/integrations.html","","domains","ESETProtectPlatform","ESET","ESET Protect Platform","The ESET Protect Platform data connector enables users to inject detections data from [ESET Protect Platform](https://www.eset.com/int/business/protect-platform/) using the provided [Integration REST API](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESET%20Protect%20Platform/Data%20Connectors). Integration REST API runs as scheduled Azure Function App.","[{""description"": "">**NOTE:** The ESET Protect Platform data connector uses Azure Functions to connect to the ESET Protect Platform via Eset Connect API to pull detections logs into Microsoft Sentinel. This process might result in additional data ingestion costs. 
See details on the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/).""}, {""description"": "">**NOTE:** The newest version of the ESET PROTECT Platform and Microsoft Sentinel integration pulls not only detections logs but also newly created incidents. If your integration was set up before 20.06.2025, please follow [these steps](https://help.eset.com/eset_connect/en-US/update_ms_sentinel_integration.html) to update it.""}, {""title"": ""Step 1 - Create an API user"", ""description"": ""Use this [instruction](https://help.eset.com/eset_connect/en-US/create_api_user_account.html) to create an ESET Connect API User account with **Login** and **Password**.""}, {""title"": ""Step 2 - Create a registered application"", ""description"": ""Create a Microsoft Entra ID registered application by following the steps in the [Register a new application instruction.](https://learn.microsoft.com/en-us/entra/identity-platform/quickstart-register-app)""}, {""title"": ""Step 3 - Deploy the ESET Protect Platform data connector using the Azure Resource Manager (ARM) template"", ""description"": ""\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-EsetProtectionPlatform-azuredeploy)\n\n2. Select the name of the **Log Analytics workspace** associated with your Microsoft Sentinel. Select the same **Resource Group** as the Resource Group of the Log Analytics workspace.\n\n3. Type the parameters of the registered application in Microsoft Entra ID: **Azure Client ID**, **Azure Client Secret**, **Azure Tenant ID**, **Object ID**. You can find the **Object ID** on Azure Portal by following this path \n> Microsoft Entra ID -> Manage (on the left-side menu) -> Enterprise applications -> Object ID column (the value next to your registered application name).\n\n4. Provide the ESET Connect API user account **Login** and **Password** obtained in **Step 1**.\n\n5. 
Select one or more ESET products (ESET PROTECT, ESET Inspect, ESET Cloud Office Security) from which detections are retrieved.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to register an application in Microsoft Entra ID"", ""description"": ""Sufficient permissions to register an application with your Microsoft Entra tenant are required.""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign the Monitoring Metrics Publisher role to the registered application in Microsoft Entra ID is required.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESET%20Protect%20Platform/Data%20Connectors/ESETProtectPlatform_API_FunctionApp.json","true" -"","ESETPROTECT","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESETPROTECT","cyberdefensegroupbv1625581149103","eset_protect","2021-10-20","","","ESET Netherlands","Partner","https://techcenter.eset.nl/en/","","domains","","","","","","","false","","false" -"Syslog","ESETPROTECT","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESETPROTECT","cyberdefensegroupbv1625581149103","eset_protect","2021-10-20","","","ESET Netherlands","Partner","https://techcenter.eset.nl/en/","","domains","ESETPROTECT","ESET","[Deprecated] ESET PROTECT","This connector gathers all events generated by ESET software through the central management solution ESET PROTECT (formerly ESET Security Management Center). This includes Anti-Virus detections, Firewall detections but also more advanced EDR detections. For a complete list of events please refer to [the documentation](https://help.eset.com/protect_admin/latest/en-US/events-exported-to-json-format.html).","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias ESETPROTECT and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESETPROTECT/Parsers/ESETPROTECT.txt).The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n2. Select **Apply below configuration to my machines** and select the facilities and severities. The default ESET PROTECT facility is **user**.\n3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Configure ESET PROTECT"", ""description"": ""Configure ESET PROTECT to send all events through Syslog.\n\n1. 
Follow [these instructions](https://help.eset.com/protect_admin/latest/en-US/admin_server_settings_syslog.html) to configure syslog output. Make sure to select **BSD** as the format and **TCP** as the transport.\n\n2. Follow [these instructions](https://help.eset.com/protect_admin/latest/en-US/admin_server_settings_export_to_syslog.html) to export all logs to syslog. Select **JSON** as the output format.\n\nNote:- Refer to the [documentation](https://learn.microsoft.com/en-us/azure/sentinel/connect-log-forwarder?tabs=rsyslog#security-considerations) for setting up the log forwarder for both local and cloud storage."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESETPROTECT/Data%20Connectors/Connector_Syslog_ESETPROTECT.json","true" -"","EatonForeseer","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/EatonForeseer","azuresentinel","azure-sentinel-solution-eatonforeseer","2022-06-28","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","EclecticIQ","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/EclecticIQ","azuresentinel","azure-sentinel-solution-eclecticiqtip","2022-09-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","Egress 
Defend","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Egress%20Defend","egress1589289169584","azure-sentinel-solution-egress-defend","2023-07-27","","","egress1589289169584","Partner","https://support.egress.com/s/","","domains","","","","","","","false","","false" -"EgressDefend_CL","Egress Defend","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Egress%20Defend","egress1589289169584","azure-sentinel-solution-egress-defend","2023-07-27","","","egress1589289169584","Partner","https://support.egress.com/s/","","domains","EgressDefendPolling","Egress Software Technologies","Egress Defend","The Egress Defend audit connector provides the capability to ingest Egress Defend Data into Microsoft Sentinel.","[{""title"": ""Connect Egress Defend with Microsoft Sentinel"", ""description"": ""Enter your Egress Defend API URl, Egress Domain and API token."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""API URL"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{apiUrl}}""}, {""displayText"": ""Domain name"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{domain}}""}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions on the Log Analytics workspace are required to enable the data connector."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true, ""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Egress API Token"", ""description"": ""An Egress API token is required to ingest audit records to Microsoft Sentinel.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Egress%20Defend/Data%20Connectors/DefendAPIConnector.json","true" -"","Egress 
Iris","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Egress%20Iris","egress1589289169584","egress-sentinel","2024-03-11","","","Egress Software Technologies Ltd","Partner","https://support.egress.com","","domains","","","","","","","false","","false" -"EgressEvents_CL","Egress Iris","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Egress%20Iris","egress1589289169584","egress-sentinel","2024-03-11","","","Egress Software Technologies Ltd","Partner","https://support.egress.com","","domains","EgressSiemPolling","Egress Software Technologies","Egress Iris Connector","The Egress Iris connector will allow you to ingest Egress data into Sentinel.","[{""title"": ""Connect Egress Data with Microsoft Sentinel"", ""description"": ""Enter your Egress API Hostname and secret."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Hostname"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{hostname}}""}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions on the Log Analytics workspace are required to enable the data connector."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true, ""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Egress API Token"", ""description"": ""An Egress API token is required to ingest audit records to Microsoft Sentinel.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Egress%20Iris/Data%20Connectors/EgressDataConnector.json","true" -"","Elastic Search","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Elastic%20Search","azuresentinel","azure-sentinel-solution-elasticsearch","2022-09-30","","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","ElasticAgent","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ElasticAgent","azuresentinel","azure-sentinel-solution-elasticagent","2021-11-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"ElasticAgentLogs_CL","ElasticAgent","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ElasticAgent","azuresentinel","azure-sentinel-solution-elasticagent","2021-11-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ElasticAgent","Elastic","Elastic Agent","The [Elastic Agent](https://www.elastic.co/security) data connector provides the capability to ingest Elastic Agent logs, metrics, and security data into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**ElasticAgentEvent**](https://aka.ms/sentinel-ElasticAgent-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using **Elastic Agent 7.14**."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server where the Elastic Agent logs are forwarded.\n\n> Logs from Elastic Agents deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure Elastic Agent (Standalone)"", ""description"": ""[Follow the instructions](https://www.elastic.co/guide/en/fleet/current/elastic-agent-configuration.html) to configure Elastic Agent to output to Logstash""}, {""title"": ""3. 
Configure Logstash to use Microsoft Logstash Output Plugin"", ""description"": ""Follow the steps to configure Logstash to use microsoft-logstash-output-azure-loganalytics plugin:\n\n3.1) Check if the plugin is already installed:\n> ./logstash-plugin list | grep 'azure-loganalytics'\n**(if the plugin is installed go to step 3.3)**\n\n3.2) Install plugin:\n> ./logstash-plugin install microsoft-logstash-output-azure-loganalytics\n\n3.3) [Configure Logstash](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/microsoft-logstash-output-azure-loganalytics) to use the plugin""}, {""title"": ""4. Validate log ingestion"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using custom table specified in step 3.3 (e.g. ElasticAgentLogs_CL).\n\n>It may take about 30 minutes until the connection streams data to your workspace.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Include custom pre-requisites if the connectivity requires - else delete customs"", ""description"": ""Description for any custom pre-requisite""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ElasticAgent/Data%20Connectors/Connector_ElasticAgent.json","true" -"","Endace","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Endace","azuresentinel","azure-sentinel-solution-endace","2025-03-24","","","Endace","Partner","https://endace.com","","domains","","","","","","","false","","false" -"","Endpoint Threat Protection Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Endpoint%20Threat%20Protection%20Essentials","azuresentinel","azure-sentinel-solution-endpointthreat","2022-11-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","Entrust identity as Service","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Entrust%20identity%20as%20Service","azuresentinel","azure-sentinel-solution-entrustidentity","2023-05-22","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","Ermes Browser Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Ermes%20Browser%20Security","ermes","azure-sentinel-solution-ermes-browser-security","2023-09-29","","","Ermes Cyber Security S.p.A.","Partner","https://www.ermes.company","","domains","","","","","","","false","","false" -"ErmesBrowserSecurityEvents_CL","Ermes Browser 
Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Ermes%20Browser%20Security","ermes","azure-sentinel-solution-ermes-browser-security","2023-09-29","","","Ermes Cyber Security S.p.A.","Partner","https://www.ermes.company","","domains","ErmesBrowserSecurityEvents","Ermes Cyber Security S.p.A.","Ermes Browser Security Events","Ermes Browser Security Events","[{""description"": ""Connect using OAuth2 credentials"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""API URL (optional)"", ""placeholder"": ""https://api.shield.ermessecurity.com"", ""type"": ""text"", ""name"": ""apiUrl"", ""validations"": {""required"": false}}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}], ""title"": ""Connect Ermes Browser Security Events to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Ermes Client Id and Client Secret"", ""description"": ""Enable API access in Ermes. 
Please contact [Ermes Cyber Security](https://www.ermes.company) support for more information.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Ermes%20Browser%20Security/Data%20Connectors/ErmesBrowserSecurityEvents_CCF/ErmesBrowserSecurityEvents_ConnectorDefinition.json","true" -"","Eset Security Management Center","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Eset%20Security%20Management%20Center","esetresearch1579795941720","Eset_Security_Management_Center_MSS","2022-05-11","","","Eset","Partner","https://support.eset.com/en","","domains","","","","","","","false","","false" -"eset_CL","Eset Security Management Center","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Eset%20Security%20Management%20Center","esetresearch1579795941720","Eset_Security_Management_Center_MSS","2022-05-11","","","Eset","Partner","https://support.eset.com/en","","domains","EsetSMC","Eset","Eset Security Management Center","Connector for [Eset SMC](https://help.eset.com/esmc_admin/72/en-US/) threat events, audit logs, firewall events and web sites filter.","[{""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure rsyslog to accept logs from your Eset SMC IP address.\n\n```\nsudo -i\r\n\r\n# Set ESET SMC source IP address\r\nexport ESETIP={Enter your IP address}\r\n\r\n# Create rsyslog configuration file\r\ncat > /etc/rsyslog.d/80-remote.conf << EOF\r\n\\$ModLoad imudp\r\n\\$UDPServerRun 514\r\n\\$ModLoad imtcp\r\n\\$InputTCPServerRun 514\r\n\\$AllowedSender TCP, 127.0.0.1, $ESETIP\r\n\\$AllowedSender UDP, 127.0.0.1, $ESETIP\r\nuser.=alert;user.=crit;user.=debug;user.=emerg;user.=err;user.=info;user.=notice;user.=warning @127.0.0.1:25224\r\nEOF\r\n\r\n# Restart rsyslog\r\nsystemctl restart rsyslog```""}, {""title"": ""3. Configure OMS agent to pass Eset SMC data in API format"", ""description"": ""In order to easily recognize Eset data we will push it to separate table and parse at agent so query in Azure Sentinel is easier and fast. To make it simple we will just modify ```match oms.**``` section to send data as API objects by changing type to out_oms_api. 
Modify file on /etc/opt/microsoft/omsagent/{REPLACEyourworkspaceid}/conf/omsagent.conf. Full ```match oms.**``` section looks like this:\r\n\r\n```\r\n\r\n type out_oms_api\r\n log_level info\r\n num_threads 5\r\n run_in_background false\r\n\r\n omsadmin_conf_path /etc/opt/microsoft/omsagent/{REPLACEyourworkspaceid}/conf/omsadmin.conf\r\n cert_path /etc/opt/microsoft/omsagent/{REPLACEyourworkspaceid}/certs/oms.crt\r\n key_path /etc/opt/microsoft/omsagent/{REPLACEyourworkspaceid}/certs/oms.key\r\n\r\n buffer_chunk_limit 15m\r\n buffer_type file\r\n buffer_path /var/opt/microsoft/omsagent/{REPLACEyourworkspaceid}/state/out_oms_common*.buffer\r\n\r\n buffer_queue_limit 10\r\n buffer_queue_full_action drop_oldest_chunk\r\n flush_interval 20s\r\n retry_limit 10\r\n retry_wait 30s\r\n max_retry_wait 9m\r\n\r\n```\r\n""}, {""title"": ""4. Change OMS agent configuration to catch tag oms.api.eset and parse structured data"", ""description"": ""Modify file /etc/opt/microsoft/omsagent/{REPLACEyourworkspaceid}/conf/omsagent.d/syslog.conf\n```\r\n\r\n type syslog\r\n port 25224\r\n bind 127.0.0.1\r\n protocol_type udp\r\n tag oms.api.eset\r\n\r\n\r\n\r\n @type parser\r\n key_name message\r\n format /(?.*?{.*})/\r\n\r\n\r\n\r\n @type parser\r\n key_name message\r\n format json\r\n\r\n```""}, {""title"": ""5. Disable automatic configuration and restart agent"", ""description"": ""```bash\r\n# Disable changes to configuration files from Portal\r\nsudo su omsagent -c 'python /opt/microsoft/omsconfig/Scripts/OMS_MetaConfigHelper.py --disable'\r\n\r\n# Restart agent\r\nsudo /opt/microsoft/omsagent/bin/service_control restart\r\n\r\n# Check agent logs\r\ntail -f /var/opt/microsoft/omsagent/log/omsagent.log\r\n```""}, {""title"": ""6. 
Configure Eset SMC to send logs to connector"", ""description"": ""Configure Eset Logs using BSD style and JSON format.\r\n- Go to Syslog server configuration as described in [Eset documentation](https://help.eset.com/esmc_admin/72/en-US/admin_server_settings.html?admin_server_settings_syslog.html) and configure Host (your connector), Format BSD, Transport TCP\r\n- Go to Logging section as described in [Eset documentation](https://help.eset.com/esmc_admin/72/en-US/admin_server_settings.html?admin_server_settings_export_to_syslog.html) and enable JSON""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Access to Eset SMC console"", ""description"": ""Permissions to configure log export""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Eset%20Security%20Management%20Center/Data%20Connectors/esetSmc.json","true" -"","Exabeam Advanced Analytics","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Exabeam%20Advanced%20Analytics","azuresentinel","azure-sentinel-solution-exabeamadvancedanalytics","2022-05-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"Syslog","Exabeam Advanced Analytics","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Exabeam%20Advanced%20Analytics","azuresentinel","azure-sentinel-solution-exabeamadvancedanalytics","2022-05-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Exabeam","Exabeam","[Deprecated] Exabeam Advanced Analytics","The [Exabeam Advanced Analytics](https://www.exabeam.com/ueba/advanced-analytics-and-mitre-detect-and-stop-threats/) data connector provides the capability to ingest Exabeam Advanced Analytics events into Microsoft Sentinel. Refer to [Exabeam Advanced Analytics documentation](https://docs.exabeam.com/) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Exabeam Advanced Analytics and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Exabeam%20Advanced%20Analytics/Parsers/ExabeamEvent.txt), on the second line of the query, enter the hostname(s) of your Exabeam Advanced Analytics device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using Exabeam Advanced Analytics i54 (Syslog)"", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the server where the Exabeam Advanced Analytic logs are generated or forwarded.\n\n> Logs from Exabeam Advanced Analytic deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the 
machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the custom log directory to be collected"", ""instructions"": [{""parameters"": {""linkType"": ""OpenCustomLogsSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Configure Exabeam event forwarding to Syslog"", ""description"": ""[Follow these instructions](https://docs.exabeam.com/en/advanced-analytics/i56/advanced-analytics-administration-guide/125351-advanced-analytics.html#UUID-7ce5ff9d-56aa-93f0-65de-c5255b682a08) to send Exabeam Advanced Analytics activity log data via syslog.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Exabeam%20Advanced%20Analytics/Data%20Connectors/Connector_Exabeam_Syslog.json","true" -"","ExtraHop","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ExtraHop","extrahop","extrahop-revealx-sentinel","2025-02-11","2025-06-04","","ExtraHop Support","Partner","https://www.extrahop.com/customer-support","","domains","","","","","","","false","","false" -"ExtraHop_Detections_CL","ExtraHop","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ExtraHop","extrahop","extrahop-revealx-sentinel","2025-02-11","2025-06-04","","ExtraHop 
Support","Partner","https://www.extrahop.com/customer-support","","domains","ExtraHop","ExtraHop","ExtraHop Detections Data Connector","The [ExtraHop](https://extrahop.com/) Detections Data Connector enables you to import detection data from ExtraHop RevealX to Microsoft Sentinel through webhook payloads.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ExtraHop in which logs are pushed via ExtraHop webhook and it will ingest logs into Microsoft Sentinel. Furthermore, the connector will fetch the ingested data from the custom logs table and create Threat Intelligence Indicators into Microsoft Sentinel Threat Intelligence. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias **ExtraHopDetections** and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ExtraHop/Parsers/ExtraHopDetections.yaml). 
The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the ExtraHop Microsoft Sentinel data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Complete the following steps for automated deployment of the ExtraHop Detections Data Connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ExtraHop-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the values for the following parameters:\n\n\t a. **Function Name** - Enter the Function Name you want. \n\n\t b. **Workspace ID** - Enter the Workspace ID of the log analytics Workspace. \n\n\t c. **Workspace Key** - Enter the Workspace Key of the log analytics Workspace. \n\n\t d. **Detections Table Name** - Enter the name of the table used to store ExtraHop detection data. \n\n\t e. **LogLevel** - Select Debug, Info, Error, or Warning for the log level or log severity value. \n\n\t f. **AppInsightsWorkspaceResourceID** - Enter the value of the 'Log Analytic Workspace-->Properties-->Resource ID' property. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Complete the following steps to manually deploy the ExtraHop Detections Data Connector with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": ""1) Deploy a Function App"", ""description"": ""> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-ExtraHop-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ExtraHopXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": ""2) Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with the following respective values (case-sensitive): \n\n\t a. **Function Name** - Enter the Function Name you want. \n\n\t b. **Workspace ID** - Enter the Workspace ID of the log analytics Workspace. \n\n\tc. **Workspace Key** - Enter the Workspace Key of the log analytics Workspace. \n\n\td. **Detections Table Name** - Enter the name of the table used to store ExtraHop detection data. \n\n\te. **LogLevel** - Select Debug, Info, Error, or Warning for the log level or log severity value. \n\n\t f. **logAnalyticsUri (optional)** - Configure this option to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}, {""title"": """", ""description"": ""**STEP 2 - Post Deployment**\n\n""}, {""title"": ""1) Get the Function App endpoint"", ""description"": ""1. Go to the Azure function overview page and click the **\""Functions\""** tab.\n2. Click on the function called **\""ExtraHopHttpStarter\""**.\n3. Go to **\""GetFunctionurl\""** and copy the function url available under **\""default (Function key)\""**.\n4. 
Replace **{functionname}** with **\""ExtraHopDetectionsOrchestrator\""** in copied function url.""}, {""title"": ""2) Configure a connection to Microsoft Sentinel and specify webhook payload criteria from RevealX"", ""description"": ""From your ExtraHop system, configure the Microsoft Sentinel integration to establish a connection between Microsoft Sentinel and ExtraHop RevealX and to create detection notification rules that will send webhook data to Microsoft Sentinel. For detailed instructions, refer to [Integrate ExtraHop RevealX with Microsoft Sentinel SIEM](https://docs.extrahop.com/current/integrations-microsoft-sentinel-siem/).""}, {""title"": """", ""description"": ""*After notification rules have been configured and Microsoft Sentinel is receiving webhook data, the Function App is triggered and you can view ExtraHop detections from the Log Analytics workspace table named \""ExtraHop_Detections_CL\"".*\n\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ExtraHop RevealX permissions"", ""description"": ""The following is required on your ExtraHop RevealX system:\n 1.Your RevealX system must be running firmware version 9.9.2 or later.\n 2.Your RevealX system must be connected to ExtraHop Cloud Services.\n 3.Your user account must have System Administratin privileges on RevealX 360 or Full Write privileges on RevealX Enterprise.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ExtraHop/Data%20Connectors/ExtraHopDataConnector/ExtraHop_FunctionApp.json","true" -"","ExtraHop Reveal(x)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ExtraHop%20Reveal%28x%29","extrahop","extrahop_revealx_mss","2022-05-19","","","ExtraHop","Partner","https://www.extrahop.com/support/","","domains","","","","","","","false","","false" -"CommonSecurityLog","ExtraHop Reveal(x)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ExtraHop%20Reveal%28x%29","extrahop","extrahop_revealx_mss","2022-05-19","","","ExtraHop","Partner","https://www.extrahop.com/support/","","domains","ExtraHopNetworks","ExtraHop Networks","[Deprecated] ExtraHop Reveal(x) via Legacy Agent","The ExtraHop Reveal(x) data connector enables you to easily 
connect your Reveal(x) system with Microsoft Sentinel to view dashboards, create custom alerts, and improve investigation. This integration gives you the ability to gain insight into your organization's network and improve your security operation capabilities.","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python --version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward ExtraHop Networks logs to Syslog agent"", ""description"": ""1. Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure to send the logs to port 514 TCP on the machine IP address.\n2. 
Follow the directions to install the [ExtraHop Detection SIEM Connector bundle](https://aka.ms/asi-syslog-extrahop-forwarding) on your Reveal(x) system. The SIEM Connector is required for this integration.\n3. Enable the trigger for **ExtraHop Detection SIEM Connector - CEF**\n4. Update the trigger with the ODS syslog targets you created\u00a0\n5. The Reveal(x) system formats syslog messages in Common Event Format (CEF) and then sends data to Microsoft Sentinel.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python --version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""ExtraHop"", ""description"": ""ExtraHop Discover or Command appliance with firmware version 7.8 or later with a user account that has Unlimited (administrator) privileges.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ExtraHop%20Reveal%28x%29/Data%20Connectors/template_ExtraHopNetworks.json","true" -"CommonSecurityLog","ExtraHop Reveal(x)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ExtraHop%20Reveal%28x%29","extrahop","extrahop_revealx_mss","2022-05-19","","","ExtraHop","Partner","https://www.extrahop.com/support/","","domains","ExtraHopNetworksAma","ExtraHop Networks","[Deprecated] ExtraHop Reveal(x) via AMA","The ExtraHop Reveal(x) data connector enables you to easily connect your Reveal(x) system with Microsoft Sentinel to view dashboards, create custom alerts, and improve investigation. 
This integration gives you the ability to gain insight into your organization's network and improve your security operation capabilities.","[{""title"": """", ""description"": """", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine""}, {""title"": ""Step B. Forward ExtraHop Networks logs to Syslog agent"", ""description"": ""1. Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure to send the logs to port 514 TCP on the machine IP address.\n2. Follow the directions to install the [ExtraHop Detection SIEM Connector bundle](https://aka.ms/asi-syslog-extrahop-forwarding) on your Reveal(x) system. The SIEM Connector is required for this integration.\n3. Enable the trigger for **ExtraHop Detection SIEM Connector - CEF**\n4. Update the trigger with the ODS syslog targets you created\u00a0\n5. The Reveal(x) system formats syslog messages in Common Event Format (CEF) and then sends data to Microsoft Sentinel.""}, {""title"": ""Step C. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ExtraHop%20Reveal%28x%29/Data%20Connectors/template_ExtraHopReveal%28x%29AMA.json","true" -"","F5 BIG-IP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20BIG-IP","f5-networks","f5_bigip_mss","2022-05-25","","","F5 Networks","Partner","https://support.f5.com/csp/home","","domains","","","","","","","false","","false" -"F5Telemetry_ASM_CL","F5 BIG-IP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20BIG-IP","f5-networks","f5_bigip_mss","2022-05-25","","","F5 Networks","Partner","https://support.f5.com/csp/home","","domains","F5BigIp","F5 Networks","F5 BIG-IP","The F5 firewall connector allows you to easily connect your F5 logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": ""Configure and connect F5 BIGIP"", ""description"": ""To connect your F5 BIGIP, you have to post a JSON declaration to the system\u2019s API endpoint. 
For instructions on how to do this, see [Integrating the F5 BGIP with Microsoft Sentinel](https://aka.ms/F5BigIp-Integrate)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20BIG-IP/Data%20Connectors/F5BigIp.json","true" -"F5Telemetry_LTM_CL","F5 BIG-IP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20BIG-IP","f5-networks","f5_bigip_mss","2022-05-25","","","F5 Networks","Partner","https://support.f5.com/csp/home","","domains","F5BigIp","F5 Networks","F5 BIG-IP","The F5 firewall connector allows you to easily connect your F5 logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": ""Configure and connect F5 BIGIP"", ""description"": ""To connect your F5 BIGIP, you have to post a JSON declaration to the system\u2019s API endpoint. 
For instructions on how to do this, see [Integrating the F5 BGIP with Microsoft Sentinel](https://aka.ms/F5BigIp-Integrate)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20BIG-IP/Data%20Connectors/F5BigIp.json","true" -"F5Telemetry_system_CL","F5 BIG-IP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20BIG-IP","f5-networks","f5_bigip_mss","2022-05-25","","","F5 Networks","Partner","https://support.f5.com/csp/home","","domains","F5BigIp","F5 Networks","F5 BIG-IP","The F5 firewall connector allows you to easily connect your F5 logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": ""Configure and connect F5 BIGIP"", ""description"": ""To connect your F5 BIGIP, you have to post a JSON declaration to the system\u2019s API endpoint. 
For instructions on how to do this, see [Integrating the F5 BGIP with Microsoft Sentinel](https://aka.ms/F5BigIp-Integrate)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20BIG-IP/Data%20Connectors/F5BigIp.json","true" -"","F5 Networks","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20Networks","f5-networks","f5_networks_data_mss","2022-05-12","","","F5","Partner","https://www.f5.com/services/support","","domains","","","","","","","false","","false" -"CommonSecurityLog","F5 Networks","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20Networks","f5-networks","f5_networks_data_mss","2022-05-12","","","F5","Partner","https://www.f5.com/services/support","","domains","F5","F5 Networks","[Deprecated] F5 Networks via Legacy Agent","The F5 firewall connector allows you to easily connect your F5 logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. 
This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python --version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Configure F5 to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent.\n\nGo to [F5 Configuring Application Security Event Logging](https://aka.ms/asi-syslog-f5-forwarding), follow the instructions to set up remote logging, using the following guidelines:\n\n1. 
Set the **Remote storage type** to CEF.\n2. Set the **Protocol setting** to UDP.\n3. Set the **IP address** to the Syslog server IP address.\n4. Set the **port number** to 514, or the port your agent uses.\n5. Set the **facility** to the one that you configured in the Syslog agent (by default, the agent sets this to local4).\n6. You can set the **Maximum Query String Size** to be the same as you configured.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python --version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20Networks/Data%20Connectors/template_F5.json","true" -"CommonSecurityLog","F5 Networks","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20Networks","f5-networks","f5_networks_data_mss","2022-05-12","","","F5","Partner","https://www.f5.com/services/support","","domains","F5Ama","F5 Networks","[Deprecated] F5 Networks via AMA","The F5 firewall connector allows you to easily connect your F5 logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": """", ""description"": """", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. 
Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine""}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Configure F5 to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent.\n\nGo to [F5 Configuring Application Security Event Logging](https://aka.ms/asi-syslog-f5-forwarding), follow the instructions to set up remote logging, using the following guidelines:\n\n1. Set the **Remote storage type** to CEF.\n2. Set the **Protocol setting** to UDP.\n3. Set the **IP address** to the Syslog server IP address.\n4. Set the **port number** to 514, or the port your agent uses.\n5. Set the **facility** to the one that you configured in the Syslog agent (by default, the agent sets this to local4).\n6. You can set the **Maximum Query String Size** to be the same as you configured.""}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20Networks/Data%20Connectors/template_F5NetworksAMA.json","true" -"","FalconFriday","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/FalconFriday","falconforcebv1623147592118","falconfriday_content","2021-10-18","","","FalconForce","Partner","https://www.falconforce.nl/en/","","domains","","","","","","","false","","false" -"","Farsight DNSDB","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Farsight%20DNSDB","","","","","","","","","","","","","","","","","false","","false" -"","Feedly","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Feedly","feedlyinc1693853810319","azure-sentinel-solution-feedly","2023-08-01","","","Feedly Inc","Partner","https://feedly.com/i/support/contactUs","","domains","","","","","","","false","","false" -"feedly_indicators_CL","Feedly","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Feedly","feedlyinc1693853810319","azure-sentinel-solution-feedly","2023-08-01","","","Feedly Inc","Partner","https://feedly.com/i/support/contactUs","","domains","Feedly","Feedly","Feedly","This connector allows you to ingest IoCs from Feedly.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions and the Logs Ingestion API to pull IoCs from Feedly into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": ""Step 1 - Prepare Your Environment"", ""description"": ""The Feedly connector will automatically create:\n\n- **Custom Table**: `feedly_indicators_CL` with the required schema\n- **Data Collection Endpoint (DCE)**: For ingesting data\n- **Data Collection Rule (DCR)**: For processing and routing data\n\nNo manual resource creation is required - everything will be created during deployment!\n\nFor detailed instructions, see: [Migrate from HTTP Data Collector API to Logs Ingestion API](https://learn.microsoft.com/azure/azure-monitor/logs/custom-logs-migrate)""}, {""title"": ""Step 2 - Deploy the Connector"", ""description"": ""The ARM template will automatically:\n\n1. Create a managed identity for the Azure Function\n2. Assign the **Monitoring Metrics Publisher** role to the Function App on the DCR\n3. Configure all necessary permissions for data ingestion\n\nNo manual role assignments are required - everything is handled automatically during deployment!""}, {""title"": ""Step 3 - Get your Feedly API token"", ""description"": ""Go to https://feedly.com/i/team/api and generate a new API token for the connector.""}, {""title"": ""(Optional Step) Securely store credentials in Azure Key Vault"", ""description"": ""Azure Key Vault provides a secure mechanism to store and retrieve secrets. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App by using the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema.""}, {""title"": ""Step 4 - Deploy the connector"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function\n\n>**IMPORTANT:** Before deploying, gather the following information:\n- Feedly API Token and Stream IDs\n\nAll Azure Monitor resources (DCE, DCR, custom table, and role assignments) will be created automatically during deployment."", ""instructions"": [], ""innerSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Feedly connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-Feedly-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the required parameters:\n - **TableName**: Name for the custom table (default: `feedly_indicators_CL`)\n - **FeedlyApiKey**: Your Feedly API token from Step 3\n - **FeedlyStreamIds**: Comma-separated list of Feedly stream IDs\n - **DaysToBackfill**: Number of days to backfill (default: 7)\n\n>**Note**: If using Azure Key Vault secrets, use the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Feedly connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""innerSteps"": [{""title"": ""1. Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://github.com/Azure/Azure-Sentinel/raw/refs/heads/master/Solutions/Feedly/Data%20Connectors/FeedlyAzureFunction.zip) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity Bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. FeedlyXXXX).\n\n\te. **Select a runtime:** Choose Python 3.10.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": ""2. Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive):\n\t\t- `DataCollectionEndpoint`: Will be populated automatically after DCE creation\n\t\t- `DcrImmutableId`: Will be populated automatically after DCR creation\n\t\t- `DcrStreamName`: `feedly_indicators_CL`\n\t\t- `FeedlyApiKey`: Your Feedly API token\n\t\t- `FeedlyStreamIds`: Comma-separated Feedly stream IDs\n\t\t- `DaysToBackfill`: Number of days to backfill (e.g., 7)\n\n**Note**: The Function App uses managed identity for authentication to Azure Monitor, so no Azure AD credentials are needed.\n\n>**Note**: Use Azure Key Vault references for sensitive values: `@Microsoft.KeyVault(SecretUri={Security Identifier})`\n\n4. Once all application settings have been entered, click **Save**.""}]}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Azure AD Application Registration"", ""description"": ""An Azure AD App Registration with client credentials and permissions to write to the Data Collection Rule. The application must be granted 'Monitoring Metrics Publisher' role on the DCR.""}, {""name"": ""Data Collection Endpoint and Rule"", ""description"": ""A Data Collection Endpoint (DCE) and Data Collection Rule (DCR) must be created before deploying this connector. [See the documentation to learn more](https://learn.microsoft.com/azure/azure-monitor/logs/custom-logs-migrate).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Feedly/Data%20Connectors/Feedly_API_AzureFunctionApp.json","true" -"","FireEye Network Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/FireEye%20Network%20Security","azuresentinel","azure-sentinel-solution-fireeyenx","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"CommonSecurityLog","FireEye Network Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/FireEye%20Network%20Security","azuresentinel","azure-sentinel-solution-fireeyenx","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","FireEyeNX","FireEye","[Deprecated] FireEye Network Security (NX) via Legacy Agent","The [FireEye Network Security 
(NX)](https://www.fireeye.com/products/network-security.html) data connector provides the capability to ingest FireEye Network Security logs into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**FireEyeNXEvent**](https://aka.ms/sentinel-FireEyeNX-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using FEOS release v9.0"", ""instructions"": []}, {""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. 
You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Configure FireEye NX to send logs using CEF"", ""description"": ""Complete the following steps to send data using CEF:\n\n2.1. Log into the FireEye appliance with an administrator account\n\n2.2. Click **Settings**\n\n2.3. Click **Notifications**\n\nClick **rsyslog**\n\n2.4. Check the **Event type** check box\n\n2.5. Make sure Rsyslog settings are:\n\n- Default format: CEF\n\n- Default delivery: Per event\n\n- Default send as: Alert""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/FireEye%20Network%20Security/Data%20Connectors/Connector_FireEyeNX_CEF.json","true" -"CommonSecurityLog","FireEye Network Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/FireEye%20Network%20Security","azuresentinel","azure-sentinel-solution-fireeyenx","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","FireEyeNXAma","FireEye","[Deprecated] FireEye Network Security (NX) via AMA","The [FireEye Network Security (NX)](https://www.fireeye.com/products/network-security.html) data connector provides the capability to ingest FireEye Network Security logs into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**FireEyeNXEvent**](https://aka.ms/sentinel-FireEyeNX-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": [{""parameters"": {""title"": ""1. 
Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Configure FireEye NX to send logs using CEF"", ""description"": ""Complete the following steps to send data using CEF:\n\n2.1. Log into the FireEye appliance with an administrator account\n\n2.2. Click **Settings**\n\n2.3. Click **Notifications**\n\nClick **rsyslog**\n\n2.4. Check the **Event type** check box\n\n2.5. Make sure Rsyslog settings are:\n\n- Default format: CEF\n\n- Default delivery: Per event\n\n- Default send as: Alert"", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/FireEye%20Network%20Security/Data%20Connectors/template_FireEyeNX_CEFAMA.json","true" -"","Flare","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Flare","flaresystmesinc1617114736428","flare-systems-firework-sentinel","2021-10-20","","","Flare","Partner","https://flare.io/company/contact/","","domains","","","","","","","false","","false" -"Firework_CL","Flare","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Flare","flaresystmesinc1617114736428","flare-systems-firework-sentinel","2021-10-20","","","Flare","Partner","https://flare.io/company/contact/","","domains","Flare","Flare","Flare","[Flare](https://flare.systems/platform/) connector allows you to receive data and intelligence from Flare on Microsoft Sentinel.","[{""title"": ""1. Creating an Alert Channel for Microsoft Sentinel"", ""description"": """", ""innerSteps"": [{""description"": ""As an organization administrator, authenticate on [Flare](https://app.flare.systems) and access the [team page](https://app.flare.systems#/team) to create a new alert channel.""}, {""description"": ""Click on 'Create a new alert channel' and select 'Microsoft Sentinel'. Enter your Shared Key And WorkspaceID. Save the Alert Channel. 
\n For more help and details, see our [Azure configuration documentation](https://docs.microsoft.com/azure/sentinel/connect-data-sources)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID"", ""value"": ""{0}""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary key"", ""value"": ""{0} ""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Associating your alert channel to an alert feed"", ""innerSteps"": [{""description"": ""At this point, you may configure alerts to be sent to Microsoft Sentinel the same way that you would configure regular email alerts.""}, {""description"": ""For a more detailed guide, refer to the Flare documentation.""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Required Flare permissions"", ""description"": ""only Flare organization administrators may configure the Microsoft Sentinel integration.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Flare/Data%20Connectors/Connector_REST_API_FlareSystemsFirework.json","true" -"","Forcepoint CASB","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20CASB","microsoftsentinelcommunity","azure-sentinel-solution-forcepoint-casb","2022-05-19","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","","","","","","","false","","false" -"CommonSecurityLog","Forcepoint CASB","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20CASB","microsoftsentinelcommunity","azure-sentinel-solution-forcepoint-casb","2022-05-19","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ForcepointCasb","Forcepoint CASB","[Deprecated] Forcepoint CASB via Legacy Agent","The Forcepoint CASB (Cloud Access Security Broker) Connector allows you to automatically export CASB logs and events into Microsoft Sentinel in real-time. This enriches visibility into user activities across locations and cloud applications, enables further correlation with data from Azure workloads and other feeds, and improves monitoring capability with Workbooks inside Microsoft Sentinel.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel. This machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version \n \n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}, {""title"": ""5. Forcepoint integration installation guide "", ""description"": ""To complete the installation of this Forcepoint product integration, follow the guide linked below.\n\n[Installation Guide >](https://frcpnt.com/casb-sentinel)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20CASB/Data%20Connectors/Forcepoint%20CASB.json","true" -"CommonSecurityLog","Forcepoint CASB","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20CASB","microsoftsentinelcommunity","azure-sentinel-solution-forcepoint-casb","2022-05-19","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ForcepointCasbAma","Forcepoint CASB","[Deprecated] Forcepoint CASB via AMA","The Forcepoint CASB (Cloud Access Security Broker) Connector allows you to automatically export CASB logs and events into Microsoft Sentinel in real-time. This enriches visibility into user activities across locations and cloud applications, enables further correlation with data from Azure workloads and other feeds, and improves monitoring capability with Workbooks inside Microsoft Sentinel.","[{""title"": """", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine.""}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}, {""title"": ""3. 
Forcepoint integration installation guide "", ""description"": ""To complete the installation of this Forcepoint product integration, follow the guide linked below.\n\n[Installation Guide >](https://frcpnt.com/casb-sentinel)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20CASB/Data%20Connectors/template_Forcepoint%20CASBAMA.json","true" -"","Forcepoint CSG","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20CSG","microsoftsentinelcommunity","azure-sentinel-solution-forcepoint-csg","2022-05-10","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","","","","","","","false","","false" -"CommonSecurityLog","Forcepoint CSG","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20CSG","microsoftsentinelcommunity","azure-sentinel-solution-forcepoint-csg","2022-05-10","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ForcepointCSG","Forcepoint","[Deprecated] Forcepoint CSG via Legacy Agent","Forcepoint Cloud Security Gateway is a converged cloud security service that provides visibility, control, and threat protection for users and data, wherever they are. For more information visit: https://www.forcepoint.com/product/cloud-security-gateway","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""This integration requires the Linux Syslog agent to collect your Forcepoint Cloud Security Gateway Web/Email logs on port 514 TCP as Common Event Format (CEF) and forward them to Microsoft Sentinel."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Your Data Connector Syslog Agent Installation Command is:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""2. Implementation options"", ""description"": ""The integration is made available with two implementations options."", ""innerSteps"": [{""title"": ""2.1 Docker Implementation"", ""description"": ""Leverages docker images where the integration component is already installed with all necessary dependencies.\n\nFollow the instructions provided in the Integration Guide linked below.\n\n[Integration Guide >](https://frcpnt.com/csg-sentinel)""}, {""title"": ""2.2 Traditional Implementation"", ""description"": ""Requires the manual deployment of the integration component inside a clean Linux machine.\n\nFollow the instructions provided in the Integration Guide linked below.\n\n[Integration Guide >](https://frcpnt.com/csg-sentinel)""}]}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version \n \n>2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF).""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20CSG/Data%20Connectors/ForcepointCloudSecurityGateway.json","true" -"CommonSecurityLog","Forcepoint CSG","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20CSG","microsoftsentinelcommunity","azure-sentinel-solution-forcepoint-csg","2022-05-10","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ForcepointCSGAma","Forcepoint","[Deprecated] Forcepoint CSG via AMA","Forcepoint Cloud Security Gateway is a converged cloud security service that provides visibility, control, and threat protection for users and data, wherever they are. For more information visit: https://www.forcepoint.com/product/cloud-security-gateway","[{""title"": """", ""description"": """", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. 
Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine.""}, {""title"": ""Step B. Implementation options"", ""description"": ""The integration is made available with two implementations options."", ""innerSteps"": [{""title"": ""1. Docker Implementation"", ""description"": ""Leverages docker images where the integration component is already installed with all necessary dependencies.\n\nFollow the instructions provided in the Integration Guide linked below.\n\n[Integration Guide >](https://frcpnt.com/csg-sentinel)""}, {""title"": ""2. Traditional Implementation"", ""description"": ""Requires the manual deployment of the integration component inside a clean Linux machine.\n\nFollow the instructions provided in the Integration Guide linked below.\n\n[Integration Guide >](https://frcpnt.com/csg-sentinel)""}]}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF).""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20CSG/Data%20Connectors/template_ForcepointCloudSecurityGatewayAMA.json","true" -"","Forcepoint DLP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20DLP","microsoftsentinelcommunity","azure-sentinel-solution-forcepoint-dlp","2022-05-09","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","","","","","","","false","","false" -"ForcepointDLPEvents_CL","Forcepoint DLP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20DLP","microsoftsentinelcommunity","azure-sentinel-solution-forcepoint-dlp","2022-05-09","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","Forcepoint_DLP","Forcepoint","Forcepoint DLP","The Forcepoint DLP (Data Loss Prevention) connector allows you to automatically export DLP incident data from Forcepoint DLP into Microsoft Sentinel in real-time. 
This enriches visibility into user activities and data loss incidents, enables further correlation with data from Azure workloads and other feeds, and improves monitoring capability with Workbooks inside Microsoft Sentinel.","[{""title"": """", ""description"": ""Follow step by step instructions in the [Forcepoint DLP documentation for Microsoft Sentinel](https://frcpnt.com/dlp-sentinel) to configure this connector."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20DLP/Data%20Connectors/Forcepoint%20DLP.json","true" -"","Forcepoint NGFW","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20NGFW","microsoftsentinelcommunity","azure-sentinel-solution-forcepoint-ngfw","2022-05-25","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","","","","","","","false","","false" -"CommonSecurityLog","Forcepoint NGFW","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20NGFW","microsoftsentinelcommunity","azure-sentinel-solution-forcepoint-ngfw","2022-05-25","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ForcepointNgfw","Forcepoint","[Deprecated] Forcepoint NGFW via Legacy Agent","The Forcepoint NGFW (Next Generation Firewall) connector allows you to automatically export user-defined Forcepoint NGFW logs into Microsoft Sentinel in real-time. This enriches visibility into user activities recorded by NGFW, enables further correlation with data from Azure workloads and other feeds, and improves monitoring capability with Workbooks inside Microsoft Sentinel.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python - version \n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}, {""title"": ""5. Forcepoint integration installation guide "", ""description"": ""To complete the installation of this Forcepoint product integration, follow the guide linked below.\n\n[Installation Guide >](https://frcpnt.com/ngfw-sentinel)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20NGFW/Data%20Connectors/FORCEPOINT_NGFW.json","true" -"CommonSecurityLog","Forcepoint NGFW","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20NGFW","microsoftsentinelcommunity","azure-sentinel-solution-forcepoint-ngfw","2022-05-25","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ForcepointNgfwAma","Forcepoint","[Deprecated] Forcepoint NGFW via AMA","The Forcepoint NGFW (Next Generation Firewall) connector allows you to automatically export user-defined Forcepoint NGFW logs into Microsoft Sentinel in real-time. This enriches visibility into user activities recorded by NGFW, enables further correlation with data from Azure workloads and other feeds, and improves monitoring capability with Workbooks inside Microsoft Sentinel.","[{""title"": """", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine.""}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}, {""title"": ""3. Forcepoint integration installation guide "", ""description"": ""To complete the installation of this Forcepoint product integration, follow the guide linked below.\n\n[Installation Guide >](https://frcpnt.com/ngfw-sentinel)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20NGFW/Data%20Connectors/template_FORCEPOINT_NGFWAMA.json","true" -"","Forescout (Legacy)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forescout%20%28Legacy%29","azuresentinel","azure-sentinel-solution-forescout","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"Syslog","Forescout (Legacy)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forescout%20%28Legacy%29","azuresentinel","azure-sentinel-solution-forescout","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Forescout","Forescout","Forescout","The [Forescout](https://www.forescout.com/) data connector provides the capability to ingest [Forescout events](https://docs.forescout.com/bundle/syslog-3-6-1-h/page/syslog-3-6-1-h.How-to-Work-with-the-Syslog-Plugin.html) into Microsoft Sentinel. 
Refer to [Forescout documentation](https://docs.forescout.com/bundle/syslog-msg-3-6-tn/page/syslog-msg-3-6-tn.About-Syslog-Messages-in-Forescout.html) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**ForescoutEvent**](https://aka.ms/sentinel-forescout-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using Forescout Syslog Plugin version: v3.6"", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server where the Forescout logs are forwarded.\n\n> Logs from Forescout Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, 
{""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n 2. Select **Apply below configuration to my machines** and select the facilities and severities.\n 3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Configure Forescout event forwarding"", ""description"": ""Follow the configuration steps below to get Forescout logs into Microsoft Sentinel.\n1. [Select an Appliance to Configure.](https://docs.forescout.com/bundle/syslog-3-6-1-h/page/syslog-3-6-1-h.Select-an-Appliance-to-Configure.html)\n2. [Follow these instructions](https://docs.forescout.com/bundle/syslog-3-6-1-h/page/syslog-3-6-1-h.Send-Events-To-Tab.html#pID0E0CE0HA) to forward alerts from the Forescout platform to a syslog server.\n3. 
[Configure](https://docs.forescout.com/bundle/syslog-3-6-1-h/page/syslog-3-6-1-h.Syslog-Triggers.html) the settings in the Syslog Triggers tab.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forescout%20%28Legacy%29/Data%20Connectors/Forescout_syslog.json","true" -"","Forescout eyeInspect for OT Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forescout%20eyeInspect%20for%20OT%20Security","forescout","azure-sentinel-eyeinspectotsecurity","2025-07-10","","","Forescout Technologies","Partner","https://www.forescout.com/support","","domains","","","","","","","false","","false" -"ForescoutOtAlert_CL","Forescout eyeInspect for OT Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forescout%20eyeInspect%20for%20OT%20Security","forescout","azure-sentinel-eyeinspectotsecurity","2025-07-10","","","Forescout Technologies","Partner","https://www.forescout.com/support","","domains","Forescout_eyeInspect_for_OT_Security","Forescout","Forescout eyeInspect for OT Security","Forescout eyeInspect for OT Security connector allows you to connect Asset/Alert information from Forescout eyeInspect OT platform with Microsoft Sentinel, to view and analyze data using Log Analytics Tables and Workbooks. 
This gives you more insight into OT organization network and improves security operation capabilities.","[{""title"": ""Forescout eyeInspect OT Microsoft Sentinel Integration"", ""description"": ""Instructions on how to configure Forescout eyeInspect Microsoft Sentinel Integration are provided at Forescout eyeInspect Documentation Portal"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forescout%20eyeInspect%20for%20OT%20Security/Data%20Connectors/Forescout%20eyeInspect%20for%20OT%20Security.json","true" -"ForescoutOtAsset_CL","Forescout eyeInspect for OT Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forescout%20eyeInspect%20for%20OT%20Security","forescout","azure-sentinel-eyeinspectotsecurity","2025-07-10","","","Forescout Technologies","Partner","https://www.forescout.com/support","","domains","Forescout_eyeInspect_for_OT_Security","Forescout","Forescout eyeInspect for OT Security","Forescout eyeInspect for OT Security connector allows you to connect Asset/Alert information from Forescout eyeInspect OT platform with Microsoft Sentinel, to view and analyze data using Log Analytics Tables and Workbooks. 
This gives you more insight into OT organization network and improves security operation capabilities.","[{""title"": ""Forescout eyeInspect OT Microsoft Sentinel Integration"", ""description"": ""Instructions on how to configure Forescout eyeInspect Microsoft Sentinel Integration are provided at Forescout eyeInspect Documentation Portal"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forescout%20eyeInspect%20for%20OT%20Security/Data%20Connectors/Forescout%20eyeInspect%20for%20OT%20Security.json","true" -"","ForescoutHostPropertyMonitor","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ForescoutHostPropertyMonitor","forescout","azure-sentinel-solution-forescout","2022-06-28","","","Forescout Technologies","Partner","https://www.forescout.com/support","","domains","","","","","","","false","","false" -"ForescoutComplianceStatus_CL","ForescoutHostPropertyMonitor","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ForescoutHostPropertyMonitor","forescout","azure-sentinel-solution-forescout","2022-06-28","","","Forescout Technologies","Partner","https://www.forescout.com/support","","domains","ForescoutHostPropertyMonitor","Forescout","Forescout Host Property Monitor","The Forescout Host Property Monitor connector allows you to connect host/policy/compliance properties from Forescout platform with Microsoft Sentinel, to view, create custom incidents, and improve investigation. 
This gives you more insight into your organization network and improves your security operation capabilities.","[{""title"": """", ""description"": ""Instructions on how to configure Forescout Microsoft Sentinel plugin are provided at Forescout Documentation Portal (https://docs.forescout.com/bundle/microsoft-sentinel-module-v2-0-0-h)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Forescout Plugin requirement"", ""description"": ""Please make sure Forescout Microsoft Sentinel plugin is running on Forescout platform""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ForescoutHostPropertyMonitor/Data%20Connectors/ForescoutHostPropertyMonitor.json","true" -"ForescoutHostProperties_CL","ForescoutHostPropertyMonitor","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ForescoutHostPropertyMonitor","forescout","azure-sentinel-solution-forescout","2022-06-28","","","Forescout Technologies","Partner","https://www.forescout.com/support","","domains","ForescoutHostPropertyMonitor","Forescout","Forescout Host Property Monitor","The Forescout Host Property Monitor connector allows you to connect host/policy/compliance properties from Forescout platform with Microsoft Sentinel, to view, create custom incidents, and improve investigation. 
This gives you more insight into your organization network and improves your security operation capabilities.","[{""title"": """", ""description"": ""Instructions on how to configure Forescout Microsoft Sentinel plugin are provided at Forescout Documentation Portal (https://docs.forescout.com/bundle/microsoft-sentinel-module-v2-0-0-h)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Forescout Plugin requirement"", ""description"": ""Please make sure Forescout Microsoft Sentinel plugin is running on Forescout platform""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ForescoutHostPropertyMonitor/Data%20Connectors/ForescoutHostPropertyMonitor.json","true" -"ForescoutPolicyStatus_CL","ForescoutHostPropertyMonitor","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ForescoutHostPropertyMonitor","forescout","azure-sentinel-solution-forescout","2022-06-28","","","Forescout Technologies","Partner","https://www.forescout.com/support","","domains","ForescoutHostPropertyMonitor","Forescout","Forescout Host Property Monitor","The Forescout Host Property Monitor connector allows you to connect host/policy/compliance properties from Forescout platform with Microsoft Sentinel, to view, create custom incidents, and improve investigation. 
This gives you more insight into your organization network and improves your security operation capabilities.","[{""title"": """", ""description"": ""Instructions on how to configure Forescout Microsoft Sentinel plugin are provided at Forescout Documentation Portal (https://docs.forescout.com/bundle/microsoft-sentinel-module-v2-0-0-h)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Forescout Plugin requirement"", ""description"": ""Please make sure Forescout Microsoft Sentinel plugin is running on Forescout platform""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ForescoutHostPropertyMonitor/Data%20Connectors/ForescoutHostPropertyMonitor.json","true" -"","ForgeRock Common Audit for CEF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ForgeRock%20Common%20Audit%20for%20CEF","publisherid_test","offerid_test","2022-05-04","","","Forgerock","Partner","https://www.forgerock.com/support","","domains","","","","","","","false","","false" -"CommonSecurityLog","ForgeRock Common Audit for CEF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ForgeRock%20Common%20Audit%20for%20CEF","publisherid_test","offerid_test","2022-05-04","","","Forgerock","Partner","https://www.forgerock.com/support","","domains","ForgeRock","ForgeRock Inc","[Deprecated] ForgeRock Identity Platform","The ForgeRock Identity Platform provides a single common auditing framework. Extract and aggregate log data across the entire platform with common audit (CAUD) event handlers and unique IDs so that it can be tracked holistically. Open and extensible, you can leverage audit logging and reporting capabilities for integration with Microsoft Sentinel via this CAUD for CEF connector.","[{""title"": ""Configuration for the ForgeRock Common Audit (CAUD) for Microsoft Sentinel"", ""description"": ""In ForgeRock, install and configure this Common Audit (CAUD) for Microsoft Sentinel per the documentation at https://github.com/javaservlets/SentinelAuditEventHandler. Next, in Azure, follow the below CEF steps.""}, {""title"": ""\n\n\n1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ForgeRock%20Common%20Audit%20for%20CEF/Data%20Connectors/ForgeRock_CEF.json","true" -"","Fortinet FortiGate Next-Generation Firewall connector for Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiGate%20Next-Generation%20Firewall%20connector%20for%20Microsoft%20Sentinel","azuresentinel","azure-sentinel-solution-fortinetfortigate","2021-08-13","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"CommonSecurityLog","Fortinet FortiGate Next-Generation Firewall connector for Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiGate%20Next-Generation%20Firewall%20connector%20for%20Microsoft%20Sentinel","azuresentinel","azure-sentinel-solution-fortinetfortigate","2021-08-13","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Fortinet","Fortinet","[Deprecated] Fortinet via Legacy Agent","The Fortinet firewall connector allows you to easily connect your Fortinet logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python --version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py &&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Fortinet logs to Syslog agent"", ""description"": ""Set your Fortinet to send Syslog messages in CEF format to the proxy machine. 
Make sure you to send the logs to port 514 TCP on the machine\u2019s IP address.\n\n\nCopy the CLI commands below and:\n- Replace \""server <ip address>\"" with the Syslog agent's IP address.\n- Set the \""<facility_name>\"" to use the facility you configured in the Syslog agent (by default, the agent sets this to local4).\n- Set the Syslog port to 514, the port your agent uses.\n- To enable CEF format in early FortiOS versions, you may need to run the command \""set csv disable\"".\n\nFor more information, go to the [Fortinet Document Library](https://aka.ms/asi-syslog-fortinet-fortinetdocumentlibrary), choose your version, and use the \""Handbook\"" and \""Log Message Reference\"" PDFs.\n\n[Learn more >](https://aka.ms/CEF-Fortinet)"", ""instructions"": [{""parameters"": {""label"": ""Set up the connection using the CLI to run the following commands:"", ""value"": ""config log syslogd setting\n set status enable\nset format cef\nset port 514\nset server \nend"", ""rows"": 8}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python --version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py &&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiGate%20Next-Generation%20Firewall%20connector%20for%20Microsoft%20Sentinel/Data%20Connectors/Fortinet-FortiGate.json","true" -"CommonSecurityLog","Fortinet FortiGate Next-Generation Firewall connector for Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiGate%20Next-Generation%20Firewall%20connector%20for%20Microsoft%20Sentinel","azuresentinel","azure-sentinel-solution-fortinetfortigate","2021-08-13","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","FortinetAma","Fortinet","[Deprecated] Fortinet via AMA","The Fortinet firewall connector allows you to easily connect your Fortinet logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": """", ""description"": """", ""instructions"": [{""parameters"": {""title"": ""1. 
Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine""}, {""title"": ""Step B. Forward Fortinet logs to Syslog agent"", ""description"": ""Set your Fortinet to send Syslog messages in CEF format to the proxy machine. 
Make sure you to send the logs to port 514 TCP on the machine\u2019s IP address.\n\n\nCopy the CLI commands below and:\n- Replace \""server <ip address>\"" with the Syslog agent's IP address.\n- Set the \""<facility_name>\"" to use the facility you configured in the Syslog agent (by default, the agent sets this to local4).\n- Set the Syslog port to 514, the port your agent uses.\n- To enable CEF format in early FortiOS versions, you may need to run the command \""set csv disable\"".\n\nFor more information, go to the [Fortinet Document Library](https://aka.ms/asi-syslog-fortinet-fortinetdocumentlibrary), choose your version, and use the \""Handbook\"" and \""Log Message Reference\"" PDFs.\n\n[Learn more >](https://aka.ms/CEF-Fortinet)"", ""instructions"": [{""parameters"": {""label"": ""Set up the connection using the CLI to run the following commands:"", ""value"": ""config log syslogd setting\n set status enable\nset format cef\nset port 514\nset server \nend"", ""rows"": 8}, ""type"": ""CopyableLabel""}]}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiGate%20Next-Generation%20Firewall%20connector%20for%20Microsoft%20Sentinel/Data%20Connectors/template_Fortinet-FortiGateAma.json","true" -"","Fortinet FortiNDR Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiNDR%20Cloud","fortinet","fortindrcloud-sentinel","2024-01-15","","","Fortinet","Partner","https://www.fortinet.com/support","","domains","","","","","","","false","","false" -"FncEventsDetections_CL","Fortinet FortiNDR Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiNDR%20Cloud","fortinet","fortindrcloud-sentinel","2024-01-15","","","Fortinet","Partner","https://www.fortinet.com/support","","domains","FortinetFortiNdrCloudDataConnector","Fortinet","Fortinet FortiNDR Cloud","The Fortinet FortiNDR Cloud data connector provides the capability to ingest [Fortinet FortiNDR Cloud](https://docs.fortinet.com/product/fortindr-cloud) data into Microsoft Sentinel using the FortiNDR Cloud API","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the FortiNDR Cloud API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. 
Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/Fortinet%20FortiNDR%20Cloud/Parsers/Fortinet_FortiNDR_Cloud.md) to create the Kusto function alias **Fortinet_FortiNDR_Cloud**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Fortinet FortiNDR Cloud Logs Collection**\n\nThe provider should provide or link to detailed steps to configure the 'PROVIDER NAME APPLICATION NAME' API endpoint so that the Azure Function can authenticate to it successfully, get its authorization key or token, and pull the appliance's logs into Microsoft Sentinel.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Fortinet FortiNDR Cloud connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the FortiNDR Cloud API credentials (available in FortiNDR Cloud account management), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""**Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the Fortinet FortiNDR Cloud connector.\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-FortinetFortiNDR-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location** (make sure to use the same location as your Resource Group, and that the location supports Flex Consumption). \n3. Enter the **Workspace ID**, **Workspace Key**, **AwsAccessKeyId**, **AwsSecretAccessKey**, and/or Other required fields. \n4. Click **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""MetaStream Credentials"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **FortiNDR Cloud Account Code** are required to retrieve event data.""}, {""name"": ""API Credentials"", ""description"": ""**FortiNDR Cloud API Token**, **FortiNDR Cloud Account UUID** are required to retrieve detection data.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiNDR%20Cloud/Data%20Connectors/FortinetFortiNdrCloud_API_AzureFunctionApp.json","true" -"FncEventsObservation_CL","Fortinet FortiNDR Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiNDR%20Cloud","fortinet","fortindrcloud-sentinel","2024-01-15","","","Fortinet","Partner","https://www.fortinet.com/support","","domains","FortinetFortiNdrCloudDataConnector","Fortinet","Fortinet FortiNDR Cloud","The Fortinet FortiNDR Cloud data connector provides the capability to ingest [Fortinet FortiNDR Cloud](https://docs.fortinet.com/product/fortindr-cloud) data into Microsoft Sentinel using the FortiNDR Cloud API","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the FortiNDR Cloud API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/Fortinet%20FortiNDR%20Cloud/Parsers/Fortinet_FortiNDR_Cloud.md) to create the Kusto function alias **Fortinet_FortiNDR_Cloud**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Fortinet FortiNDR Cloud Logs Collection**\n\nThe provider should provide or link to detailed steps to configure the 'PROVIDER NAME APPLICATION NAME' API endpoint so that the Azure Function can authenticate to it successfully, get its authorization key or token, and pull the appliance's logs into Microsoft Sentinel.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Fortinet FortiNDR Cloud connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the FortiNDR Cloud API credentials (available in FortiNDR Cloud account management), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""**Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the Fortinet FortiNDR Cloud connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-FortinetFortiNDR-azuredeploy)\n2. 
Select the preferred **Subscription**, **Resource Group** and **Location** (make sure to use the same location as your Resource Group, and that the location supports Flex Consumption). \n3. Enter the **Workspace ID**, **Workspace Key**, **AwsAccessKeyId**, **AwsSecretAccessKey**, and/or Other required fields. \n4. Click **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""MetaStream Credentials"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **FortiNDR Cloud Account Code** are required to retrieve event data.""}, {""name"": ""API Credentials"", ""description"": ""**FortiNDR Cloud API Token**, **FortiNDR Cloud Account UUID** are required to retrieve detection data.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiNDR%20Cloud/Data%20Connectors/FortinetFortiNdrCloud_API_AzureFunctionApp.json","true" -"FncEventsSuricata_CL","Fortinet FortiNDR Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiNDR%20Cloud","fortinet","fortindrcloud-sentinel","2024-01-15","","","Fortinet","Partner","https://www.fortinet.com/support","","domains","FortinetFortiNdrCloudDataConnector","Fortinet","Fortinet FortiNDR Cloud","The Fortinet FortiNDR Cloud data connector provides the capability to ingest [Fortinet FortiNDR Cloud](https://docs.fortinet.com/product/fortindr-cloud) data into Microsoft Sentinel using the FortiNDR Cloud API","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the FortiNDR Cloud API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/Fortinet%20FortiNDR%20Cloud/Parsers/Fortinet_FortiNDR_Cloud.md) to create the Kusto function alias **Fortinet_FortiNDR_Cloud**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Fortinet FortiNDR Cloud Logs Collection**\n\nThe provider should provide or link to detailed steps to configure the 'PROVIDER NAME APPLICATION NAME' API endpoint so that the Azure Function can authenticate to it successfully, get its authorization key or token, and pull the appliance's logs into Microsoft Sentinel.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Fortinet FortiNDR Cloud connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the FortiNDR Cloud API credentials (available in FortiNDR Cloud account management), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""**Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the Fortinet FortiNDR Cloud connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-FortinetFortiNDR-azuredeploy)\n2. 
Select the preferred **Subscription**, **Resource Group** and **Location** (make sure to use the same location as your Resource Group, and that the location supports Flex Consumption). \n3. Enter the **Workspace ID**, **Workspace Key**, **AwsAccessKeyId**, **AwsSecretAccessKey**, and/or Other required fields. \n4. Click **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""MetaStream Credentials"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **FortiNDR Cloud Account Code** are required to retrieve event data.""}, {""name"": ""API Credentials"", ""description"": ""**FortiNDR Cloud API Token**, **FortiNDR Cloud Account UUID** are required to retrieve detection data.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiNDR%20Cloud/Data%20Connectors/FortinetFortiNdrCloud_API_AzureFunctionApp.json","true" -"","Fortinet FortiWeb Cloud WAF-as-a-Service connector for Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiWeb%20Cloud%20WAF-as-a-Service%20connector%20for%20Microsoft%20Sentinel","azuresentinel","azure-sentinel-solution-fortiwebcloud","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"CommonSecurityLog","Fortinet FortiWeb Cloud WAF-as-a-Service connector for Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiWeb%20Cloud%20WAF-as-a-Service%20connector%20for%20Microsoft%20Sentinel","azuresentinel","azure-sentinel-solution-fortiwebcloud","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","FortinetFortiWeb","Microsoft","[Deprecated] Fortinet FortiWeb Web Application Firewall via Legacy Agent","The [fortiweb](https://www.fortinet.com/products/web-application-firewall/fortiweb) data connector provides the capability to ingest Threat Analytics and events into Microsoft Sentinel.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiWeb%20Cloud%20WAF-as-a-Service%20connector%20for%20Microsoft%20Sentinel/Data%20Connectors/Fortiweb.json","true" -"CommonSecurityLog","Fortinet FortiWeb Cloud WAF-as-a-Service connector for Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiWeb%20Cloud%20WAF-as-a-Service%20connector%20for%20Microsoft%20Sentinel","azuresentinel","azure-sentinel-solution-fortiwebcloud","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","FortinetFortiWebAma","Microsoft","Fortinet FortiWeb Web Application Firewall via AMA","The [fortiweb](https://www.fortinet.com/products/web-application-firewall/fortiweb) data connector provides the capability to ingest Threat Analytics and events into Microsoft Sentinel.","[{""title"": """", ""description"": """", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine""}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiWeb%20Cloud%20WAF-as-a-Service%20connector%20for%20Microsoft%20Sentinel/Data%20Connectors/template_FortiwebAma.json","true" -"","GDPR Compliance & Data Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GDPR%20Compliance%20%26%20Data%20Security","azuresentinel","azure-sentinel-solution-gdpr-compliance","2025-10-08","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","Garrison ULTRA","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Garrison%20ULTRA","garrisontechnologyltd1725375696148","microsoft-sentinel-solution-garrison-ultra","2024-10-04","","","Garrison","Partner","https://support.ultra.garrison.com","","domains","","","","","","","false","","false" -"Garrison_ULTRARemoteLogs_CL","Garrison ULTRA","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Garrison%20ULTRA","garrisontechnologyltd1725375696148","microsoft-sentinel-solution-garrison-ultra","2024-10-04","","","Garrison","Partner","https://support.ultra.garrison.com","","domains","GarrisonULTRARemoteLogs","Garrison","Garrison ULTRA Remote Logs","The [Garrison ULTRA](https://www.garrison.com/en/garrison-ultra-cloud-platform) Remote Logs connector allows you to ingest Garrison ULTRA Remote Logs into Microsoft Sentinel.","[{""title"": ""Deployment - Azure Resource Manager (ARM) Template"", ""description"": ""These steps outline the automated deployment of the Garrison ULTRA Remote Logs data 
connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Frefs%2Fheads%2Fmaster%2FSolutions%2FGarrison%2520ULTRA%2FData%2520Connectors%2FGarrisonULTRARemoteLogs%2Fazuredeploy_DataCollectionResources.json) \t\t\t\n2. Provide the required details such as Resource Group, Microsoft Sentinel Workspace and ingestion configurations \n> **NOTE:** It is recommended to create a new Resource Group for deployment of these resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Garrison ULTRA"", ""description"": ""To use this data connector you must have an active [Garrison ULTRA](https://www.garrison.com/en/garrison-ultra-cloud-platform) license.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Garrison%20ULTRA/Data%20Connectors/GarrisonULTRARemoteLogs/GarrisonULTRARemoteLogs_ConnectorUI.json","true" -"","Gigamon Connector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Gigamon%20Connector","gigamon-inc","microsoft-sentinel-solution-gigamon","","","","Gigamon","Partner","https://www.gigamon.com/","","domains","","","","","","","false","","false" -"Gigamon_CL","Gigamon Connector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Gigamon%20Connector","gigamon-inc","microsoft-sentinel-solution-gigamon","","","","Gigamon","Partner","https://www.gigamon.com/","","domains","GigamonDataConnector","Gigamon","Gigamon AMX Data Connector","Use this data connector to integrate with Gigamon Application Metadata Exporter (AMX) and get data sent directly to Microsoft Sentinel. ","[{""title"": ""Gigamon Data Connector"", ""description"": ""1. Application Metadata Exporter (AMX) application converts the output from the Application Metadata Intelligence (AMI) in CEF format into JSON format and sends it to the cloud tools and Kafka.\n 2. The AMX application can be deployed only on a V Series Node and can be connected to Application Metadata Intelligence running on a physical node or a virtual machine.\n 3. The AMX application and the AMI are managed by GigaVUE-FM. 
This application is supported on VMware ESXi, VMware NSX-T, AWS and Azure.\n "", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Gigamon%20Connector/Data%20Connectors/Connector_Analytics_Gigamon.json","true" -"","GitHub","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GitHub","microsoftcorporation1622712991604","sentinel4github","2021-10-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"GitHubAuditLogsV2_CL","GitHub","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GitHub","microsoftcorporation1622712991604","sentinel4github","2021-10-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GitHubAuditDefinitionV2","Microsoft","GitHub Enterprise Audit Log (via Codeless Connector Framework) (Preview)","The GitHub audit log connector provides the capability to ingest GitHub logs into Microsoft Sentinel. 
By connecting GitHub audit logs into Microsoft Sentinel, you can view this data in workbooks, use it to create custom alerts, and improve your investigation process.

**Note:** If you intended to ingest GitHub subscribed events into Microsoft Sentinel, please refer to GitHub (using Webhooks) Connector from ""**Data Connectors**"" gallery.","[{""title"": ""Connect the GitHub Enterprise-level Audit Log to Microsoft Sentinel"", ""description"": ""Enable GitHub audit logs. \n Follow [this guide](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens#creating-a-personal-access-token-classic) to create or find your personal access token."", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Github Enterprise API URL"", ""columnValue"": ""properties.addOnAttributes.ApiUrl""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Enterprise"", ""title"": ""Add Enterprise"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""parameters"": {""content"": ""Enter your Github Enterprise API URL and API key. 
Github Enterprise API URL formats:\n* `https://api.github.com/enterprises/{enterprise}`\n* `https://api.{subdomain}.ghe.com/enterprises/{enterprise}`""}, ""type"": ""Markdown""}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Github Enterprise API URL"", ""placeholder"": ""Your Github Enterprise API URL"", ""type"": ""text"", ""name"": ""ApiUrl""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""Enter API Key"", ""type"": ""password"", ""name"": ""apikey""}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""GitHub API personal access token"", ""description"": ""To enable polling for the Enterprise audit log, ensure the authenticated user is an Enterprise admin and has a GitHub personal access token (classic) with the `read:audit_log` scope.""}, {""name"": ""GitHub Enterprise type"", ""description"": ""This connector will only function with GitHub Enterprise Cloud; it will not support GitHub Enterprise Server.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GitHub/Data%20Connectors/GitHubAuditLogs_CCF/GitHubAuditLogs_ConnectorDefinition.json","true" -"GitHubAuditLogPolling_CL","GitHub","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GitHub","microsoftcorporation1622712991604","sentinel4github","2021-10-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GitHubEcAuditLogPolling","GitHub","[Deprecated] GitHub Enterprise Audit Log","The GitHub audit log connector provides the capability to ingest GitHub logs into Microsoft Sentinel. 
By connecting GitHub audit logs into Microsoft Sentinel, you can view this data in workbooks, use it to create custom alerts, and improve your investigation process.

**Note:** If you intended to ingest GitHub subscribed events into Microsoft Sentinel, please refer to GitHub (using Webhooks) Connector from ""**Data Connectors**"" gallery.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": ""Connect the GitHub Enterprise Organization-level Audit Log to Microsoft Sentinel"", ""description"": ""Enable GitHub audit logs. \n Follow [this guide](https://docs.github.com/en/github/authenticating-to-github/keeping-your-account-and-data-secure/creating-a-personal-access-token) to create or find your personal access token."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Organization Name"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{placeHolder1}}"", ""placeHolderValue"": """"}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""GitHub API personal access token"", ""description"": ""You need a GitHub personal access token to enable polling for the organization audit log. You may use either a classic token with 'read:org' scope OR a fine-grained token with 'Administration: Read-only' scope.""}, {""name"": ""GitHub Enterprise type"", ""description"": ""This connector will only function with GitHub Enterprise Cloud; it will not support GitHub Enterprise Server. ""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GitHub/Data%20Connectors/azuredeploy_GitHub_native_poller_connector.json","true" -"GitHubAuditLogsV2_CL","GitHub","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GitHub","microsoftcorporation1622712991604","sentinel4github","2021-10-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GitHubEcAuditLogPolling","GitHub","[Deprecated] GitHub Enterprise Audit Log","The GitHub audit log connector provides the capability to ingest GitHub logs into Microsoft Sentinel. 
By connecting GitHub audit logs into Microsoft Sentinel, you can view this data in workbooks, use it to create custom alerts, and improve your investigation process.

**Note:** If you intended to ingest GitHub subscribed events into Microsoft Sentinel, please refer to GitHub (using Webhooks) Connector from ""**Data Connectors**"" gallery.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": ""Connect the GitHub Enterprise Organization-level Audit Log to Microsoft Sentinel"", ""description"": ""Enable GitHub audit logs. \n Follow [this guide](https://docs.github.com/en/github/authenticating-to-github/keeping-your-account-and-data-secure/creating-a-personal-access-token) to create or find your personal access token."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Organization Name"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{placeHolder1}}"", ""placeHolderValue"": """"}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""GitHub API personal access token"", ""description"": ""You need a GitHub personal access token to enable polling for the organization audit log. You may use either a classic token with 'read:org' scope OR a fine-grained token with 'Administration: Read-only' scope.""}, {""name"": ""GitHub Enterprise type"", ""description"": ""This connector will only function with GitHub Enterprise Cloud; it will not support GitHub Enterprise Server. 
""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GitHub/Data%20Connectors/azuredeploy_GitHub_native_poller_connector.json","true" -"githubscanaudit_CL","GitHub","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GitHub","microsoftcorporation1622712991604","sentinel4github","2021-10-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GitHubWebhook","Microsoft","GitHub (using Webhooks)","The [GitHub](https://www.github.com) webhook data connector provides the capability to ingest GitHub subscribed events into Microsoft Sentinel using [GitHub webhook events](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads). The connector provides ability to get events into Microsoft Sentinel which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

**Note:** If you are intended to ingest Github Audit logs, Please refer to GitHub Enterprise Audit Log Connector from ""**Data Connectors**"" gallery.","[{""title"": """", ""description"": "">**NOTE:** This connector has been built on http trigger based Azure Function. And it provides an endpoint to which github will be connected through it's webhook capability and posts the subscribed events into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Github Webhook connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the GitHub data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-GitHubwebhookAPI-azuredeploy)\n2. 
Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region and deploy. \n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the GitHub webhook data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentinel-GitHubWebhookAPI-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration. \n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select ** New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional) - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""**Post Deployment steps**\n\n""}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""STEP 1 - To get the Azure Function url"", ""description"": "" 1. Go to Azure function Overview page and Click on \""Functions\"" in the left blade.\n 2. Click on the function called \""GithubwebhookConnector\"".\n 3. Go to \""GetFunctionurl\"" and copy the function url.""}, {""title"": ""STEP 2 - Configure Webhook to Github Organization"", ""description"": ""1. Go to [GitHub](https://www.github.com) and open your account and click on \""Your Organizations.\""\n 2. Click on Settings.\n 3. Click on \""Webhooks\"" and enter the function app url which was copied from above STEP 1 under payload URL textbox. \n 4. Choose content type as \""application/json\"". \n 5. Subscribe for events and Click on \""Add Webhook\""""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""*Now we are done with the github Webhook configuration. 
Once the github events triggered and after the delay of 20 to 30 mins (As there will be a dealy for LogAnalytics to spin up the resources for the first time), you should be able to see all the transactional events from the Github into LogAnalytics workspace table called \""githubscanaudit_CL\"".*\n\n For more details, Click [here](https://aka.ms/sentinel-gitHubwebhooksteps)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GitHub/Data%20Connectors/GithubWebhook/GithubWebhook_API_FunctionApp.json","true" -"","GitLab","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GitLab","azuresentinel","azure-sentinel-solution-gitlab","2022-04-27","2022-06-27","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"Syslog","GitLab","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GitLab","azuresentinel","azure-sentinel-solution-gitlab","2022-04-27","2022-06-27","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GitLab","Microsoft","[Deprecated] GitLab","The [GitLab](https://about.gitlab.com/solutions/devops-platform/) connector allows you to easily connect your GitLab (GitLab Enterprise Edition - Standalone) logs with Microsoft Sentinel. This gives you more security insight into your organization's DevOps pipelines.","[{""title"": ""Configuration"", ""description"": "">This data connector depends on three parsers based on a Kusto Function to work as expected [**GitLab Access Logs**](https://aka.ms/sentinel-GitLabAccess-parser), [**GitLab Audit Logs**](https://aka.ms/sentinel-GitLabAudit-parser) and [**GitLab Application Logs**](https://aka.ms/sentinel-GitLabApp-parser) which are deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n2. Select **Apply below configuration to my machines** and select the facilities and severities.\n3. 
Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GitLab/Data%20Connectors/Connector_Syslog_GitLab.json","true" -"","Global Secure Access","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Global%20Secure%20Access","azuresentinel","azure-sentinel-solution-globalsecureaccess","2024-04-08","","","Microsoft Corporation","Microsoft","https://learn.microsoft.com/en-us/entra/global-secure-access/overview-what-is-global-secure-access","","domains","","","","","","","false","","false" -"","Google Apigee","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Apigee","azuresentinel","azure-sentinel-solution-googleapigeex","2021-10-28","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"ApigeeX_CL","Google Apigee","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Apigee","azuresentinel","azure-sentinel-solution-googleapigeex","2021-10-28","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ApigeeXDataConnector","Google","[DEPRECATED] Google ApigeeX","The [Google ApigeeX](https://cloud.google.com/apigee/docs) data connector provides the capability to ingest ApigeeX audit logs into Microsoft Sentinel using the GCP Logging API. Refer to [GCP Logging API documentation](https://cloud.google.com/logging/docs/reference/v2/rest) for more information.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the GCP API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**ApigeeX**](https://aka.ms/sentinel-ApigeeXDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuring GCP and obtaining credentials**\n\n1. Make sure that Logging API is [enabled](https://cloud.google.com/apis/docs/getting-started#enabling_apis). \n\n2. [Create service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts) with [required permissions](https://cloud.google.com/iam/docs/audit-logging#audit_log_permissions) and [get service account key json file](https://cloud.google.com/iam/docs/creating-managing-service-account-keys).\n\n3. 
Prepare GCP project ID where ApigeeX is located.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Azure Blob Storage connection string and container name, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ApigeeXDataConnector-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Google Cloud Platform Project Id**, **Google Cloud Platform Credentials File Content**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key**\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-ApigeeXDataConnector-functionapp) file. 
Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions.\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tRESOURCE_NAMES\n\t\tCREDENTIALS_FILE_CONTENT\n\t\tWORKSPACE_ID\n\t\tSHARED_KEY\n\t\tlogAnalyticsUri (Optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. 
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. \n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""GCP service account"", ""description"": ""GCP service account with permissions to read logs is required for GCP Logging API. Also json file with service account key is required. 
See the documentation to learn more about [required permissions](https://cloud.google.com/iam/docs/audit-logging#audit_log_permissions), [creating service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts) and [creating service account key](https://cloud.google.com/iam/docs/creating-managing-service-account-keys).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Apigee/Data%20Connectors/ApigeeX_FunctionApp.json","true" -"GCPApigee","Google Apigee","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Apigee","azuresentinel","azure-sentinel-solution-googleapigeex","2021-10-28","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GoogleApigeeXLogsCCPDefinition","Microsoft","Google ApigeeX (via Codeless Connector Framework)","The Google ApigeeX data connector provides the capability to ingest Audit logs into Microsoft Sentinel using the Google Apigee API. Refer to [Google Apigee API](https://cloud.google.com/apigee/docs/reference/apis/apigee/rest/?apix=true) documentation for more information.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. 
Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Apigee/Data%20Connectors/ApigeeXReadme.md) for log setup and authentication setup tutorial.\n Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPApigeeLogSetup)\nAuthentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup)"", ""govScript"": ""#### 1. Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Apigee/Data%20Connectors/ApigeeXReadme.md) for log setup and authentication setup tutorial.\n Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPApigeeLogSetup)\nAuthentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov)""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": 
{""content"": ""#### 2. Enable ApigeeX logs \n In the Google Cloud Console, enable Apigee API, if not enabled previously, and save the changes.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. Connect new collectors \n To enable ApigeeX Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}], ""title"": ""Connect Google ApigeeX to Microsoft Sentinel\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Apigee/Data%20Connectors/GoogleApigeeXLog_CCP/GoogleApigeeXLog_ConnectorDefinition.json","true" -"","Google Cloud Platform Audit Logs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Audit%20Logs","azuresentinel","azure-sentinel-solution-gcpauditlogs-api","2023-03-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","Google Cloud Platform BigQuery","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20BigQuery","azuresentinel","azure-sentinel-solution-gcpbigquery","2023-03-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","Google Cloud Platform Cloud Monitoring","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Monitoring","azuresentinel","azure-sentinel-solution-gcpmonitoring","2022-07-01","","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"GCPMonitoring","Google Cloud Platform Cloud Monitoring","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Monitoring","azuresentinel","azure-sentinel-solution-gcpmonitoring","2022-07-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GCPMonitorCCPDefinition","Microsoft","Google Cloud Platform Cloud Monitoring (via Codeless Connector Framework)","The Google Cloud Platform Cloud Monitoring data connector ingests Monitoring logs from Google Cloud into Microsoft Sentinel using the Google Cloud Monitoring API. Refer to [Cloud Monitoring API](https://cloud.google.com/monitoring/api/v3) documentation for more details.","[{""title"": ""Connect Google Cloud Platform Cloud Monitoring to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Setup GCP Monitoring Integration\n To fetch logs from GCP Cloud Monitoring to Sentinel **Project ID** of Google cloud is required.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Chose the **Metric Type**\n To collect logs from Google Cloud Monitoring provide the required Metric type.\n\nFor more details, refer to [Google Cloud Metrics](https://cloud.google.com/monitoring/api/metrics_gcp).""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. OAuth Credentials\n To Fetch Oauth client id and client secret refer to this [documentation](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Monitoring/Data%20Connectors/Readme.md).""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 4. 
Connect to Sentinel\n Click on **Connect** to start pulling monitoring logs from Google Cloud into Microsoft Sentinel.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""GCP Project ID"", ""name"": ""projectid"", ""required"": true, ""description"": ""Enter your Google Cloud Project ID.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Metric Type"", ""type"": ""text"", ""name"": ""metrictype"", ""required"": true, ""description"": ""Provide the metric types you want to collect logs for with comma separated.For example: compute.googleapis.com/instance/disk/write_bytes_count,compute.googleapis.com/instance/uptime_total""}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Metric Type"", ""columnValue"": ""properties.addOnAttributes.metrictype""}, {""columnName"": ""Project ID"", ""columnValue"": ""properties.addOnAttributes.projectid""}], ""menuItems"": [""DeleteConnector""]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Monitoring/Data%20Connectors/GCPMonitoringLogs_CCP/GCPCloudMonitoringLogs_ConnectorDefinition.json","true" -"GCP_MONITORING_CL","Google Cloud Platform Cloud Monitoring","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Monitoring","azuresentinel","azure-sentinel-solution-gcpmonitoring","2022-07-01","","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com","","domains","GCPMonitorDataConnector","Google","[DEPRECATED] Google Cloud Platform Cloud Monitoring","The Google Cloud Platform Cloud Monitoring data connector provides the capability to ingest [GCP Monitoring metrics](https://cloud.google.com/monitoring/api/metrics_gcp) into Microsoft Sentinel using the GCP Monitoring API. Refer to [GCP Monitoring API documentation](https://cloud.google.com/monitoring/api/v3) for more information.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the GCP API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**GCP_MONITORING**](https://aka.ms/sentinel-GCPMonitorDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuring GCP and obtaining credentials**\n\n1. [Create service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts) with Monitoring Viewer role and [get service account key json file](https://cloud.google.com/iam/docs/creating-managing-service-account-keys).\n\n2. Prepare the list of GCP projects to get metrics from. [Learn more about GCP projects](https://cloud.google.com/resource-manager/docs/cloud-platform-resource-hierarchy).\n\n3. 
Prepare the list of [GCP metric types](https://cloud.google.com/monitoring/api/metrics_gcp)""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Azure Blob Storage connection string and container name, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-GCPMonitorDataConnector-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Google Cloud Platform Project Id List**, **Google Cloud Platform Metric Types List**, **Google Cloud Platform Credentials File Content**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key**\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-GCPMonitorDataConnector-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions.\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGCP_PROJECT_ID\n\t\tGCP_METRICS\n\t\tGCP_CREDENTIALS_FILE_CONTENT\n\t\tWORKSPACE_ID\n\t\tSHARED_KEY\n\t\tlogAnalyticsUri (Optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. \n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""GCP service account"", ""description"": ""GCP service account with permissions to read Cloud Monitoring metrics is required for GCP Monitoring API (required *Monitoring Viewer* role). Also json file with service account key is required. 
See the documentation to learn more about [creating service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts) and [creating service account key](https://cloud.google.com/iam/docs/creating-managing-service-account-keys).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Monitoring/Data%20Connectors/GCP_Monitor_API_FunctionApp.json","true" -"","Google Cloud Platform Cloud Run","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Run","azuresentinel","azure-sentinel-solution-gcpcloudrun","2021-07-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"GCPCloudRun","Google Cloud Platform Cloud Run","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Run","azuresentinel","azure-sentinel-solution-gcpcloudrun","2021-07-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GCPCloudRunLogs_ConnectorDefinition","Microsoft","GCP Cloud Run (via Codeless Connector Framework)","The GCP Cloud Run data connector provides the capability to ingest Cloud Run request logs into Microsoft Sentinel using Pub/Sub. Refer the [Cloud Run Overview](https://cloud.google.com/run/docs/logging) for more details.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. 
Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Run/Data%20Connectors/README.md) for log setup and authentication setup tutorial.\n\n Find the Log set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPCloudRunLogsSetup)\n & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup)"", ""govScript"": ""#### 1. Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Run/Data%20Connectors/README.md) for log setup and authentication setup tutorial.\n\n Find the Log set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPCloudRunLogsSetup)\n & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov)""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", 
""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Enable Cloud Run logs \n In the Google Cloud Console, enable cloud logging if not enabled previously, and save the changes.Deploy or update your Cloud Run services with logging enabled.\n\n Reference Link: [Link to documentation](https://cloud.google.com/run/docs/setup)""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. Connect new collectors \n To enable GCP Cloud Run Request Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}], ""title"": ""Connect GCP Cloud Run to Microsoft Sentinel\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Run/Data%20Connectors/GCPCloudRunLog_CCF/GCPCloudRunLogs_ConnectorDefinition.json","true" -"","Google Cloud Platform Compute Engine","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Compute%20Engine","azuresentinel","azure-sentinel-solution-gcpcomputeengine","2022-07-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"GCPComputeEngine","Google Cloud Platform Compute Engine","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Compute%20Engine","azuresentinel","azure-sentinel-solution-gcpcomputeengine","2022-07-07","","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com/","","domains","GCPComputeEngineLogsCCPDefinition","Microsoft","Google Cloud Platform Compute Engine (via Codeless Connector Framework)","The Google Cloud Platform Compute Engine data connector provides the capability to ingest Compute Engine Audit logs into Microsoft Sentinel using the Google Cloud Compute Engine API. Refer to [Cloud Compute Engine API](https://cloud.google.com/compute/docs/reference/rest/v1) documentation for more information.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Compute%20Engine/Data%20Connectors/GCPComputeEngineReadme.md) for log setup and authentication setup tutorial.\n Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPComputeEngineLogsSetup/GCPComputeEngineLogSetup.tf)\nAuthentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup)"", ""govScript"": ""#### 1. 
Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Compute%20Engine/Data%20Connectors/GCPComputeEngineReadme.md) for log setup and authentication setup tutorial.\n Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPComputeEngineLogsSetup/GCPComputeEngineLogSetup.tf)\nAuthentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov)""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Enable Compute Engine logs \n In the Google Cloud Console, enable Compute Engine API, if not enabled previously, and save the changes.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \n To enable Compute Engine Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}], ""title"": ""Connect GCP Compute Engine to Microsoft Sentinel\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Compute%20Engine/Data%20Connectors/GCPComputeEngineLog_CCP/GCPComputeEngineLog_ConnectorDefinition.json","true" -"","Google Cloud Platform Firewall Logs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Firewall%20Logs","azuresentinel","azure-sentinel-solution-gcpfirewalllogs-api","2024-11-03","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","Google Cloud Platform Load Balancer Logs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Load%20Balancer%20Logs","azuresentinel","azure-sentinel-solution-gcploadbalancerlogs-api","2025-02-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"GCPLoadBalancerLogs_CL","Google Cloud Platform Load Balancer Logs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Load%20Balancer%20Logs","azuresentinel","azure-sentinel-solution-gcploadbalancerlogs-api","2025-02-12","","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com/","","domains","GCPFLoadBalancerLogsCCPDefinition","Microsoft","GCP Pub/Sub Load Balancer Logs (via Codeless Connector Platform).","Google Cloud Platform (GCP) Load Balancer logs provide detailed insights into network traffic, capturing both inbound and outbound activities. These logs are used for monitoring access patterns and identifying potential security threats across GCP resources. Additionally, these logs also include GCP Web Application Firewall (WAF) logs, enhancing the ability to detect and mitigate risks effectively.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. Set up your GCP environment \n You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider and service account with permissions to get and consume from subscription. \n Terraform provides API for the IAM that creates the resources. [Link to Terraform scripts](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation)."", ""govScript"": ""#### 1. Set up your GCP environment \n You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider and service account with permissions to get and consume from subscription. \n Terraform provides API for the IAM that creates the resources. [Link to Gov Terraform scripts](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov).""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""PoolId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Enable Load Balancer logs \nIn your GCP account, navigate to the Load Balancer section. In here you can navigate to [**Backend Service**] -> [**Edit**], once you are in the [**Backend Service**] on the [**Logging**] section **enable** the checkbox of [**Enable Logs**]. Once you open the rule, switch the toggle button under the **Logs** section to **On**, and save the changes.\n\nFor more information: [Link to documentation](https://cloud.google.com/load-balancing/docs/https/https-logging-monitoring)""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. Connect new collectors \n To enable GCP Load Balancer Logs for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Load%20Balancer%20Logs/Data%20Connectors/GCPFLoadBalancerLogs_GCP_CCP/GCPFLoadBalancerLogs_Definition.json","true" -"","Google Cloud Platform Security Command Center","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Security%20Command%20Center","azuresentinel","azure-sentinel-solution-gcpscclogs-api","2023-09-11","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"GoogleCloudSCC","Google Cloud Platform Security Command 
Center","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Security%20Command%20Center","azuresentinel","azure-sentinel-solution-gcpscclogs-api","2023-09-11","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GoogleSCCDefinition","Microsoft","Google Security Command Center","The Google Cloud Platform (GCP) Security Command Center is a comprehensive security and risk management platform for Google Cloud, ingested from Sentinel's connector. It offers features such as asset inventory and discovery, vulnerability and threat detection, and risk mitigation and remediation to help you gain insight into your organization's security and data attack surface. This integration enables you to perform tasks related to findings and assets more effectively.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Set up your GCP environment \n You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider and service account with permissions to get and consume from subscription. \n Terraform provides API for the IAM that creates the resources. [Link to Terraform scripts](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation).""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""PoolId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable GCP SCC for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}]}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": false, ""write"": false, ""delete"": false, ""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Security%20Command%20Center/Data%20Connectors/GCPSecurityCommandCenter.json","true" -"","Google Cloud Platform VPC Flow Logs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20VPC%20Flow%20Logs","azuresentinel","azure-sentinel-solution-gcpvpcflowlogs-api","2025-02-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"GCPVPCFlow","Google Cloud Platform VPC Flow Logs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20VPC%20Flow%20Logs","azuresentinel","azure-sentinel-solution-gcpvpcflowlogs-api","2025-02-12","","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com","","domains","GCPVPCFlowLogsCCPDefinition","Microsoft","GCP Pub/Sub VPC Flow Logs (via Codeless Connector Framework)","The Google Cloud Platform (GCP) VPC Flow Logs enable you to capture network traffic activity at the VPC level, allowing you to monitor access patterns, analyze network performance, and detect potential threats across GCP resources.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. Set up your GCP environment \n You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription. \n To configure this data connector, execute the following Terraform scripts:\n 1. Setup Required Resources: [Configuration Guide](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPVPCFlowLogsSetup/readme.md)\n 2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector , kindly skip this step and use the existing service account and workload identity pool."", ""govScript"": ""#### 1. Set up your GCP environment \n You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription. \n To configure this data connector, execute the following Terraform scripts:\n 1. Setup Required Resources: [Configuration Guide](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPVPCFlowLogsSetup/readme.md)\n 2. 
Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector , kindly skip this step and use the existing service account and workload identity pool.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Enable VPC Flow Logs \nIn your GCP account, navigate to the VPC network section. Select the subnet you want to monitor and enable Flow Logs under the Logging section.\n\nFor more information: [Google Cloud Documentation](https://cloud.google.com/vpc/docs/using-flow-logs)""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \n To enable GCP VPC Flow Logs for Microsoft Sentinel, click the Add new collector button, fill in the required information in the context pane, and click Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20VPC%20Flow%20Logs/Data%20Connectors/GCPVPCFlowLogs_GCP_CCP/GCPVPCFlowLogs_ConnectorDefinition.json","true" -"","Google Kubernetes Engine","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine","azuresentinel","azure-sentinel-solution-gkelogs-api","2025-04-04","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"GKEAPIServer","Google Kubernetes Engine","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine","azuresentinel","azure-sentinel-solution-gkelogs-api","2025-04-04","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GKECCPDefinition","Microsoft","Google Kubernetes Engine (via Codeless Connector Framework)","The Google Kubernetes Engine (GKE) Logs enable you to capture cluster activity, workload behavior, and security events, allowing you to monitor Kubernetes workloads, analyze performance, and detect potential threats across GKE clusters.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. 
Set up your GCP environment \nYou must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription.\n\nTo configure this data connector, execute the following Terraform scripts:\n\n1. Setup Required Resources: [Configuration Guide](https://github.com/Alekhya0824/GithubValidationREPO/blob/main/gke/Readme.md)\n2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool."", ""govScript"": ""#### 1. Set up your GCP environment \nYou must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription.\n\nTo configure this data connector, execute the following Terraform scripts:\n\n1. Setup Required Resources: [Configuration Guide](https://github.com/Alekhya0824/GithubValidationREPO/blob/main/gke/Readme.md)\n2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Enable Kubernetes Engine Logging \nIn your GCP account, navigate to the Kubernetes Engine section. Enable Cloud Logging for your clusters. Within Cloud Logging, ensure that the specific logs you want to ingest\u2014such as API server, scheduler, controller manager, HPA decision, and application logs\u2014are enabled for effective monitoring and security analysis.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. Connect new collectors \nTo enable GKE Logs for Microsoft Sentinel, click the **Add new collector** button, fill in the required information in the context pane, and click **Connect**.""}}, {""type"": ""GCPGrid"", ""parameters"": {""collectors"": [{""name"": ""Audit Collector"", ""tableName"": ""GKEAudit""}, {""name"": ""API Server Collector"", ""tableName"": ""GKEAPIServer""}, {""name"": ""Scheduler Collector"", ""tableName"": ""GKEScheduler""}, {""name"": ""Controller Manager Collector"", ""tableName"": ""GKEControllerManager""}, {""name"": ""HPA Decision Collector"", ""tableName"": ""GKEHPADecision""}, {""name"": ""Application Collector"", ""tableName"": ""GKEApplication""}]}}, {""type"": ""GCPContextPane"", ""parameters"": {}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine/Data%20Connectors/GoogleKubernetesEngineLogs_ccp/GoogleKubernetesEngineLogs_ConnectorDefinition.json","true" -"GKEApplication","Google Kubernetes Engine","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine","azuresentinel","azure-sentinel-solution-gkelogs-api","2025-04-04","","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com","","domains","GKECCPDefinition","Microsoft","Google Kubernetes Engine (via Codeless Connector Framework)","The Google Kubernetes Engine (GKE) Logs enable you to capture cluster activity, workload behavior, and security events, allowing you to monitor Kubernetes workloads, analyze performance, and detect potential threats across GKE clusters.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. Set up your GCP environment \nYou must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription.\n\nTo configure this data connector, execute the following Terraform scripts:\n\n1. Setup Required Resources: [Configuration Guide](https://github.com/Alekhya0824/GithubValidationREPO/blob/main/gke/Readme.md)\n2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool."", ""govScript"": ""#### 1. Set up your GCP environment \nYou must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription.\n\nTo configure this data connector, execute the following Terraform scripts:\n\n1. Setup Required Resources: [Configuration Guide](https://github.com/Alekhya0824/GithubValidationREPO/blob/main/gke/Readme.md)\n2. 
Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Enable Kubernetes Engine Logging \nIn your GCP account, navigate to the Kubernetes Engine section. Enable Cloud Logging for your clusters. Within Cloud Logging, ensure that the specific logs you want to ingest\u2014such as API server, scheduler, controller manager, HPA decision, and application logs\u2014are enabled for effective monitoring and security analysis.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \nTo enable GKE Logs for Microsoft Sentinel, click the **Add new collector** button, fill in the required information in the context pane, and click **Connect**.""}}, {""type"": ""GCPGrid"", ""parameters"": {""collectors"": [{""name"": ""Audit Collector"", ""tableName"": ""GKEAudit""}, {""name"": ""API Server Collector"", ""tableName"": ""GKEAPIServer""}, {""name"": ""Scheduler Collector"", ""tableName"": ""GKEScheduler""}, {""name"": ""Controller Manager Collector"", ""tableName"": ""GKEControllerManager""}, {""name"": ""HPA Decision Collector"", ""tableName"": ""GKEHPADecision""}, {""name"": ""Application Collector"", ""tableName"": ""GKEApplication""}]}}, {""type"": ""GCPContextPane"", ""parameters"": {}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine/Data%20Connectors/GoogleKubernetesEngineLogs_ccp/GoogleKubernetesEngineLogs_ConnectorDefinition.json","true" -"GKEAudit","Google Kubernetes Engine","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine","azuresentinel","azure-sentinel-solution-gkelogs-api","2025-04-04","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GKECCPDefinition","Microsoft","Google Kubernetes Engine (via Codeless Connector Framework)","The Google Kubernetes Engine (GKE) Logs enable you to capture cluster activity, workload behavior, and security events, allowing you to monitor Kubernetes workloads, analyze performance, and detect potential threats across GKE clusters.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 
1. Set up your GCP environment \nYou must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription.\n\nTo configure this data connector, execute the following Terraform scripts:\n\n1. Setup Required Resources: [Configuration Guide](https://github.com/Alekhya0824/GithubValidationREPO/blob/main/gke/Readme.md)\n2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool."", ""govScript"": ""#### 1. Set up your GCP environment \nYou must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription.\n\nTo configure this data connector, execute the following Terraform scripts:\n\n1. Setup Required Resources: [Configuration Guide](https://github.com/Alekhya0824/GithubValidationREPO/blob/main/gke/Readme.md)\n2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Enable Kubernetes Engine Logging \nIn your GCP account, navigate to the Kubernetes Engine section. Enable Cloud Logging for your clusters. Within Cloud Logging, ensure that the specific logs you want to ingest\u2014such as API server, scheduler, controller manager, HPA decision, and application logs\u2014are enabled for effective monitoring and security analysis.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. Connect new collectors \nTo enable GKE Logs for Microsoft Sentinel, click the **Add new collector** button, fill in the required information in the context pane, and click **Connect**.""}}, {""type"": ""GCPGrid"", ""parameters"": {""collectors"": [{""name"": ""Audit Collector"", ""tableName"": ""GKEAudit""}, {""name"": ""API Server Collector"", ""tableName"": ""GKEAPIServer""}, {""name"": ""Scheduler Collector"", ""tableName"": ""GKEScheduler""}, {""name"": ""Controller Manager Collector"", ""tableName"": ""GKEControllerManager""}, {""name"": ""HPA Decision Collector"", ""tableName"": ""GKEHPADecision""}, {""name"": ""Application Collector"", ""tableName"": ""GKEApplication""}]}}, {""type"": ""GCPContextPane"", ""parameters"": {}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine/Data%20Connectors/GoogleKubernetesEngineLogs_ccp/GoogleKubernetesEngineLogs_ConnectorDefinition.json","true" -"GKEControllerManager","Google Kubernetes Engine","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine","azuresentinel","azure-sentinel-solution-gkelogs-api","2025-04-04","","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com","","domains","GKECCPDefinition","Microsoft","Google Kubernetes Engine (via Codeless Connector Framework)","The Google Kubernetes Engine (GKE) Logs enable you to capture cluster activity, workload behavior, and security events, allowing you to monitor Kubernetes workloads, analyze performance, and detect potential threats across GKE clusters.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. Set up your GCP environment \nYou must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription.\n\nTo configure this data connector, execute the following Terraform scripts:\n\n1. Setup Required Resources: [Configuration Guide](https://github.com/Alekhya0824/GithubValidationREPO/blob/main/gke/Readme.md)\n2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool."", ""govScript"": ""#### 1. Set up your GCP environment \nYou must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription.\n\nTo configure this data connector, execute the following Terraform scripts:\n\n1. Setup Required Resources: [Configuration Guide](https://github.com/Alekhya0824/GithubValidationREPO/blob/main/gke/Readme.md)\n2. 
Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Enable Kubernetes Engine Logging \nIn your GCP account, navigate to the Kubernetes Engine section. Enable Cloud Logging for your clusters. Within Cloud Logging, ensure that the specific logs you want to ingest\u2014such as API server, scheduler, controller manager, HPA decision, and application logs\u2014are enabled for effective monitoring and security analysis.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \nTo enable GKE Logs for Microsoft Sentinel, click the **Add new collector** button, fill in the required information in the context pane, and click **Connect**.""}}, {""type"": ""GCPGrid"", ""parameters"": {""collectors"": [{""name"": ""Audit Collector"", ""tableName"": ""GKEAudit""}, {""name"": ""API Server Collector"", ""tableName"": ""GKEAPIServer""}, {""name"": ""Scheduler Collector"", ""tableName"": ""GKEScheduler""}, {""name"": ""Controller Manager Collector"", ""tableName"": ""GKEControllerManager""}, {""name"": ""HPA Decision Collector"", ""tableName"": ""GKEHPADecision""}, {""name"": ""Application Collector"", ""tableName"": ""GKEApplication""}]}}, {""type"": ""GCPContextPane"", ""parameters"": {}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine/Data%20Connectors/GoogleKubernetesEngineLogs_ccp/GoogleKubernetesEngineLogs_ConnectorDefinition.json","true" -"GKEHPADecision","Google Kubernetes Engine","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine","azuresentinel","azure-sentinel-solution-gkelogs-api","2025-04-04","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GKECCPDefinition","Microsoft","Google Kubernetes Engine (via Codeless Connector Framework)","The Google Kubernetes Engine (GKE) Logs enable you to capture cluster activity, workload behavior, and security events, allowing you to monitor Kubernetes workloads, analyze performance, and detect potential threats across GKE clusters.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": 
""#### 1. Set up your GCP environment \nYou must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription.\n\nTo configure this data connector, execute the following Terraform scripts:\n\n1. Setup Required Resources: [Configuration Guide](https://github.com/Alekhya0824/GithubValidationREPO/blob/main/gke/Readme.md)\n2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool."", ""govScript"": ""#### 1. Set up your GCP environment \nYou must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription.\n\nTo configure this data connector, execute the following Terraform scripts:\n\n1. Setup Required Resources: [Configuration Guide](https://github.com/Alekhya0824/GithubValidationREPO/blob/main/gke/Readme.md)\n2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Enable Kubernetes Engine Logging \nIn your GCP account, navigate to the Kubernetes Engine section. Enable Cloud Logging for your clusters. Within Cloud Logging, ensure that the specific logs you want to ingest\u2014such as API server, scheduler, controller manager, HPA decision, and application logs\u2014are enabled for effective monitoring and security analysis.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. Connect new collectors \nTo enable GKE Logs for Microsoft Sentinel, click the **Add new collector** button, fill in the required information in the context pane, and click **Connect**.""}}, {""type"": ""GCPGrid"", ""parameters"": {""collectors"": [{""name"": ""Audit Collector"", ""tableName"": ""GKEAudit""}, {""name"": ""API Server Collector"", ""tableName"": ""GKEAPIServer""}, {""name"": ""Scheduler Collector"", ""tableName"": ""GKEScheduler""}, {""name"": ""Controller Manager Collector"", ""tableName"": ""GKEControllerManager""}, {""name"": ""HPA Decision Collector"", ""tableName"": ""GKEHPADecision""}, {""name"": ""Application Collector"", ""tableName"": ""GKEApplication""}]}}, {""type"": ""GCPContextPane"", ""parameters"": {}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine/Data%20Connectors/GoogleKubernetesEngineLogs_ccp/GoogleKubernetesEngineLogs_ConnectorDefinition.json","true" -"GKEScheduler","Google Kubernetes Engine","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine","azuresentinel","azure-sentinel-solution-gkelogs-api","2025-04-04","","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com","","domains","GKECCPDefinition","Microsoft","Google Kubernetes Engine (via Codeless Connector Framework)","The Google Kubernetes Engine (GKE) Logs enable you to capture cluster activity, workload behavior, and security events, allowing you to monitor Kubernetes workloads, analyze performance, and detect potential threats across GKE clusters.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. Set up your GCP environment \nYou must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription.\n\nTo configure this data connector, execute the following Terraform scripts:\n\n1. Setup Required Resources: [Configuration Guide](https://github.com/Alekhya0824/GithubValidationREPO/blob/main/gke/Readme.md)\n2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool."", ""govScript"": ""#### 1. Set up your GCP environment \nYou must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription.\n\nTo configure this data connector, execute the following Terraform scripts:\n\n1. Setup Required Resources: [Configuration Guide](https://github.com/Alekhya0824/GithubValidationREPO/blob/main/gke/Readme.md)\n2. 
Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Enable Kubernetes Engine Logging \nIn your GCP account, navigate to the Kubernetes Engine section. Enable Cloud Logging for your clusters. Within Cloud Logging, ensure that the specific logs you want to ingest\u2014such as API server, scheduler, controller manager, HPA decision, and application logs\u2014are enabled for effective monitoring and security analysis.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \nTo enable GKE Logs for Microsoft Sentinel, click the **Add new collector** button, fill in the required information in the context pane, and click **Connect**.""}}, {""type"": ""GCPGrid"", ""parameters"": {""collectors"": [{""name"": ""Audit Collector"", ""tableName"": ""GKEAudit""}, {""name"": ""API Server Collector"", ""tableName"": ""GKEAPIServer""}, {""name"": ""Scheduler Collector"", ""tableName"": ""GKEScheduler""}, {""name"": ""Controller Manager Collector"", ""tableName"": ""GKEControllerManager""}, {""name"": ""HPA Decision Collector"", ""tableName"": ""GKEHPADecision""}, {""name"": ""Application Collector"", ""tableName"": ""GKEApplication""}]}}, {""type"": ""GCPContextPane"", ""parameters"": {}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine/Data%20Connectors/GoogleKubernetesEngineLogs_ccp/GoogleKubernetesEngineLogs_ConnectorDefinition.json","true" -"","Google Threat Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Threat%20Intelligence","virustotalsl1681486227461","azure-sentinel-solution-google","2024-10-26","2024-10-26","","Google","Partner","https://www.virustotal.com/gui/contact-us","","domains","","","","","","","false","","false" -"","GoogleCloudPlatformCDN","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformCDN","azuresentinel","azure-sentinel-solution-gcp-cdn","2025-03-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" 
-"","GoogleCloudPlatformDNS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformDNS","azuresentinel","azure-sentinel-solution-gcpdns","2022-07-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"GCP_DNS_CL","GoogleCloudPlatformDNS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformDNS","azuresentinel","azure-sentinel-solution-gcpdns","2022-07-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GCPDNSDataConnector","Google","[DEPRECATED] Google Cloud Platform DNS","The Google Cloud Platform DNS data connector provides the capability to ingest [Cloud DNS query logs](https://cloud.google.com/dns/docs/monitoring#using_logging) and [Cloud DNS audit logs](https://cloud.google.com/dns/docs/audit-logging) into Microsoft Sentinel using the GCP Logging API. Refer to [GCP Logging API documentation](https://cloud.google.com/logging/docs/api) for more information.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the GCP API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**GCPCloudDNS**](https://aka.ms/sentinel-GCPDNSDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuring GCP and obtaining credentials**\n\n1. Make sure that Logging API is [enabled](https://cloud.google.com/apis/docs/getting-started#enabling_apis). \n\n2. [Create service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts) with Logs Viewer role (or at least with \""logging.logEntries.list\"" permission) and [get service account key json file](https://cloud.google.com/iam/docs/creating-managing-service-account-keys).\n\n3. Prepare the list of GCP resources (organizations, folders, projects) to get logs from. 
[Learn more about GCP resources](https://cloud.google.com/resource-manager/docs/cloud-platform-resource-hierarchy).""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Azure Blob Storage connection string and container name, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-GCPDNSDataConnector-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Google Cloud Platform Resource Names**, **Google Cloud Platform Credentials File Content**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key**\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-GCPDNSDataConnector-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions.\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tRESOURCE_NAMES\n\t\tCREDENTIALS_FILE_CONTENT\n\t\tWORKSPACE_ID\n\t\tSHARED_KEY\n\t\tlogAnalyticsUri (Optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. \n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""GCP service account"", ""description"": ""GCP service account with permissions to read logs (with \""logging.logEntries.list\"" permission) is required for GCP Logging API. Also json file with service account key is required. 
See the documentation to learn more about [permissions](https://cloud.google.com/logging/docs/access-control#permissions_and_roles), [creating service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts) and [creating service account key](https://cloud.google.com/iam/docs/creating-managing-service-account-keys).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformDNS/Data%20Connectors/GCP_DNS_API_FunctionApp.json","true" -"","GoogleCloudPlatformIAM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformIAM","azuresentinel","azure-sentinel-solution-gcpiam","2021-07-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"GCP_IAM_CL","GoogleCloudPlatformIAM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformIAM","azuresentinel","azure-sentinel-solution-gcpiam","2021-07-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GCPIAMDataConnector","Google","[DEPRECATED] Google Cloud Platform IAM","The Google Cloud Platform Identity and Access Management (IAM) data connector provides the capability to ingest [GCP IAM logs](https://cloud.google.com/iam/docs/audit-logging) into Microsoft Sentinel using the GCP Logging API. Refer to [GCP Logging API documentation](https://cloud.google.com/logging/docs/api) for more information.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the GCP API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**GCP_IAM**](https://aka.ms/sentinel-GCPIAMDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuring GCP and obtaining credentials**\n\n1. Make sure that Logging API is [enabled](https://cloud.google.com/apis/docs/getting-started#enabling_apis). \n\n2. (Optional) [Enable Data Access Audit logs](https://cloud.google.com/logging/docs/audit/configure-data-access#config-console-enable).\n\n3. [Create service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts) with [required permissions](https://cloud.google.com/iam/docs/audit-logging#audit_log_permissions) and [get service account key json file](https://cloud.google.com/iam/docs/creating-managing-service-account-keys).\n\n4. Prepare the list of GCP resources (organizations, folders, projects) to get logs from. 
[Learn more about GCP resources](https://cloud.google.com/resource-manager/docs/cloud-platform-resource-hierarchy).""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Azure Blob Storage connection string and container name, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-GCPIAMDataConnector-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Google Cloud Platform Resource Names**, **Google Cloud Platform Credentials File Content**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key**\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-GCPIAMDataConnector-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions.\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tRESOURCE_NAMES\n\t\tCREDENTIALS_FILE_CONTENT\n\t\tWORKSPACE_ID\n\t\tSHARED_KEY\n\t\tlogAnalyticsUri (Optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. \n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""GCP service account"", ""description"": ""GCP service account with permissions to read logs is required for GCP Logging API. Also json file with service account key is required. 
See the documentation to learn more about [required permissions](https://cloud.google.com/iam/docs/audit-logging#audit_log_permissions), [creating service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts) and [creating service account key](https://cloud.google.com/iam/docs/creating-managing-service-account-keys).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformIAM/Data%20Connectors/GCP_IAM_API_FunctionApp.json","true" -"","GoogleCloudPlatformIDS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformIDS","azuresentinel","azure-sentinel-solution-gcpids","2022-07-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"","GoogleCloudPlatformNAT","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformNAT","azuresentinel","azure-sentinel-solution-gcp-nat","2025-05-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"GCPNATAudit","GoogleCloudPlatformNAT","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformNAT","azuresentinel","azure-sentinel-solution-gcp-nat","2025-05-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GCPNATLogsCCPDefinition","Microsoft","Google Cloud Platform NAT (via Codeless Connector Framework)","The Google Cloud Platform NAT data connector provides the capability to ingest Cloud NAT Audit logs and Cloud NAT Traffic logs into Microsoft Sentinel using the Compute Engine API. Refer the [Product overview](https://cloud.google.com/nat/docs/overview) document for more details.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. 
Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformNAT/Data%20Connectors/README.md) for log setup and authentication setup tutorial.\n\n Find the Log set up script [**here**](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPCloudNATLogsSetup/GCPCloudNATLogsSetup.tf)\n & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup)"", ""govScript"": ""#### 1. Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformNAT/Data%20Connectors/README.md) for log setup and authentication setup tutorial.\n\n Find the Log set up script [**here**](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPCloudNATLogsSetup/GCPCloudNATLogsSetup.tf)\n & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov)""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": 
""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Enable NAT logs \n In the Google Cloud Console, enable cloud logging if not enabled previously, and save the changes. Navigate to Cloud NAT section and click on Add origin to create backends as per link provided below. \n\n Reference Link: [Link to documentation](https://cloud.google.com/nat/docs/monitoring)""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. Connect new collectors \n To enable GCP Cloud NAT Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}], ""title"": ""Connect GCP NAT to Microsoft Sentinel\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformNAT/Data%20Connectors/GCPNATLogs_ccp/GCPNATLogs_ConnectorDefinition.json","true" -"","GoogleCloudPlatformResourceManager","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformResourceManager","azuresentinel","azure-sentinel-solution-gcp-rm","2025-03-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"GCPResourceManager","GoogleCloudPlatformResourceManager","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformResourceManager","azuresentinel","azure-sentinel-solution-gcp-rm","2025-03-07","","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com/","","domains","GCPResourceManagerLogsCCFDefinition","Microsoft","Google Cloud Platform Resource Manager (via Codeless Connector Framework)","The Google Cloud Platform Resource Manager data connector provides the capability to ingest Resource Manager [Admin Activity and Data Access Audit logs](https://cloud.google.com/resource-manager/docs/audit-logging) into Microsoft Sentinel using the Cloud Resource Manager API. Refer the [Product overview](https://cloud.google.com/resource-manager/docs/cloud-platform-resource-hierarchy) document for more details.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleCloudPlatformResourceManager/Data%20Connectors/README.md) for log setup and authentication setup tutorial.\n\n Find the Log set up script [**here**](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPResourceManagerLogsSetup/GCPResourceManagerLogSetup.tf)\n & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup)"", ""govScript"": ""#### 1. 
Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleCloudPlatformResourceManager/Data%20Connectors/README.md) for log setup and authentication setup tutorial.\n\n Find the Log set up script [**here**](https://raw.githubusercontent.com/Azure/Azure-Sentinel/c1cb589dad1add228f78e629073a9b069ce52991/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPResourceManagerLogsSetup/GCPResourceManagerLogSetup.tf)\n & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov)""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Enable Resource Manager logs \n In the Google Cloud Console, enable cloud resource manager API if not enabled previously, and save the changes. Make sure to have organization level IAM permissions for your account to see all logs in the resource hierarchy. You can refer the document links for different IAM permissions for access control with IAM at each level provided in this [link](https://cloud.google.com/resource-manager/docs/how-to)""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \n To enable GCP Resource Manager Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}], ""title"": ""Connect GCP Resource Manager to Microsoft Sentinel\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformResourceManager/Data%20Connectors/GCPResourceManagerAuditLogs_ccf/GCPResourceManagerAuditLogs_ConnectorDefinition.json","true" -"","GoogleCloudPlatformSQL","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformSQL","azuresentinel","azure-sentinel-solution-gcpsql","2021-07-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"GCPCloudSQL","GoogleCloudPlatformSQL","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformSQL","azuresentinel","azure-sentinel-solution-gcpsql","2021-07-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GCPCloudSQLCCFDefinition","Microsoft","GCP Cloud SQL (via Codeless Connector Framework)","The GCP Cloud SQL data connector provides the capability to ingest Audit logs into Microsoft Sentinel using the GCP Cloud SQL API. Refer to [GCP cloud SQL Audit Logs](https://cloud.google.com/sql/docs/mysql/audit-logging) documentation for more information.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. 
Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformSQL/Data%20Connectors/Readme.md) for log setup and authentication setup tutorial.\n Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPCloudSQLLogsSetup/GCPCloudSQLLogsSetup.tf)\nAuthentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup)"", ""govScript"": ""#### 1. Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformSQL/Data%20Connectors/Readme.md) for log setup and authentication setup tutorial.\n Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPCloudSQLLogsSetup/GCPCloudSQLLogsSetup.tf)\nAuthentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov)""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", 
""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. In the Google Cloud Console, enable Cloud SQL API, if not enabled previously, and save the changes.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. Connect new collectors \n To enable GCP Cloud SQL Logs for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}], ""title"": ""Connect GCP Cloud SQL to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformSQL/Data%20Connectors/GCPCloudSQLLog_CCF/GCPCloudSQLLog_ConnectorDefinition.json","true" -"","GoogleDirectory","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleDirectory","","","","","","","","","","","","","","","","","false","","false" -"","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"GoogleWorkspaceReports","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceCCPDefinition","Microsoft","Google Workspace Activities (via Codeless 
Connector Framework)","The [Google Workspace](https://workspace.google.com/) Activities data connector provides the capability to ingest Activity Events from [Google Workspace API](https://developers.google.com/admin-sdk/reports/reference/rest/v1/activities/list) into Microsoft Sentinel.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Google Reports API\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add https://portal.azure.com/TokenAuthorize/ExtensionName/Microsoft_Azure_Security_Insights as the Authorized redirect URIs.\n\t 4. Once you click Create, you will be provided with the Client ID and Client Secret. \n\tCopy these values and use them in the configuration steps below.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Configure steps for the Google Reports API oauth access. Then, provide the required information below and click on Connect.\n>""}}, {""description"": ""Configure steps for the Google Reports API oauth access. 
Then, provide the required information below and click on Connect.\n>"", ""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""name"", ""columnName"": ""Name""}, {""columnValue"": ""id"", ""columnName"": ""ID""}]}}], ""title"": ""Connect to Google Workspace to start collecting user activity logs into Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Google Workspace API access"", ""description"": ""Access to the Google Workspace activities API through Oauth are required.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GoogleWorkspaceTemplate_ccp/GoogleWorkspaceReports_DataConnectorDefinition.json","true" -"GWorkspace_ReportsAPI_access_transparency_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. 
The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated; consider moving to the CCF data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_admin_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated; consider moving to the CCF data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_calendar_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickle String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create an OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select OAuth client ID.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickle String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When prompted for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_chat_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickle String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create an OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select OAuth client ID.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickle String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When prompted for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_chrome_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickle String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create an OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select OAuth client ID.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickle String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When prompted for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_context_aware_access_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create an OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickle String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_data_studio_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create an OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickle String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_drive_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create an OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickle String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_gcp_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_gplus_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_groups_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_groups_enterprise_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create an OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickle String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_jamboard_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create an OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickle String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_keep_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create an OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickle String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_login_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated; consider moving to the CCF data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_meet_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated; consider moving to the CCF data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_mobile_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated; consider moving to the CCF data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_rules_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated; consider moving to the CCF data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_saml_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated; consider moving to the CCF data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_token_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated; consider moving to the CCF data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GWorkspace_ReportsAPI_user_accounts_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"GoogleWorkspaceReports_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" -"","GreyNoiseThreatIntelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GreyNoiseThreatIntelligence","greynoiseintelligenceinc1681236078693","microsoft-sentinel-byol-greynoise","2023-09-05","2025-07-28","","GreyNoise","Partner","https://www.greynoise.io/contact/general","","domains","","","","","","","false","","false" -"ThreatIntelligenceIndicator","GreyNoiseThreatIntelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GreyNoiseThreatIntelligence","greynoiseintelligenceinc1681236078693","microsoft-sentinel-byol-greynoise","2023-09-05","2025-07-28","","GreyNoise","Partner","https://www.greynoise.io/contact/general","","domains","GreyNoise2SentinelAPI","GreyNoise, Inc. and BlueCycle LLC","GreyNoise Threat Intelligence","This Data Connector installs an Azure Function app to download GreyNoise indicators once per day and inserts them into the ThreatIntelligenceIndicator table in Microsoft Sentinel.","[{""title"": ""You can connect GreyNoise Threat Intelligence to Microsoft Sentinel by following the below steps: "", ""description"": ""\n> The following steps create an Azure AAD application, retrieves a GreyNoise API key, and saves the values in an Azure Function App Configuration.""}, {""title"": ""1. Retrieve your API Key from GreyNoise Visualizer."", ""description"": ""Generate an API key from GreyNoise Visualizer https://docs.greynoise.io/docs/using-the-greynoise-api""}, {""title"": ""2. 
In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID and Client ID. Also, get the Log Analytics Workspace ID associated with your Microsoft Sentinel instance (it should display below)."", ""description"": ""Follow the instructions here to create your Azure AAD app and save your Client ID and Tenant ID: https://learn.microsoft.com/en-us/azure/sentinel/connect-threat-intelligence-upload-api#instructions\n NOTE: Wait until step 5 to generate your client secret."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Follow the instructions here to add the Microsoft Sentinel Contributor Role: https://learn.microsoft.com/en-us/azure/sentinel/connect-threat-intelligence-upload-api#assign-a-role-to-the-application""}, {""title"": ""4. Specify the AAD permissions to enable MS Graph API access to the upload-indicators API."", ""description"": ""Follow this section here to add **'ThreatIndicators.ReadWrite.OwnedBy'** permission to the AAD App: https://learn.microsoft.com/en-us/azure/sentinel/connect-threat-intelligence-tip#specify-the-permissions-required-by-the-application. \n Back in your AAD App, ensure you grant admin consent for the permissions you just added. \n Finally, in the 'Tokens and APIs' section, generate a client secret and save it. You will need it in Step 6. ""}, {""title"": ""5. Deploy the Threat Intelligence (Preview) Solution, which includes the Threat Intelligence Upload Indicators API (Preview)"", ""description"": ""See Microsoft Sentinel Content Hub for this Solution, and install it in the Microsoft Sentinel instance.""}, {""title"": ""6. 
Deploy the Azure Function"", ""description"": ""Click the Deploy to Azure button.\n\n [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-GreyNoise-azuredeploy)\n\n Fill in the appropriate values for each parameter. **Be aware** that the only valid values for the **GREYNOISE_CLASSIFICATIONS** parameter are **benign**, **malicious** and/or **unknown**, which must be comma-separated.""}, {""title"": ""7. Send indicators to Sentinel"", ""description"": ""The function app installed in Step 6 queries the GreyNoise GNQL API once per day, and submits each indicator found in STIX 2.1 format to the [Microsoft Upload Threat Intelligence Indicators API](https://learn.microsoft.com/en-us/azure/sentinel/upload-indicators-api). \n Each indicator expires in ~24 hours from creation unless found on the next day's query. In this case the TI Indicator's **Valid Until** time is extended for another 24 hours, which keeps it active in Microsoft Sentinel. \n\n For more information on the GreyNoise API and the GreyNoise Query Language (GNQL), [click here](https://developer.greynoise.io/docs/using-the-greynoise-api).""}]","{""resourceProvider"": [{""provider"": ""Microsoft.SecurityInsights/threatintelligence/write"", ""permissionsDisplayText"": ""write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""GreyNoise API Key"", ""description"": ""Retrieve your GreyNoise API Key [here](https://viz.greynoise.io/account/api-key).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GreyNoiseThreatIntelligence/Data%20Connectors/GreyNoiseConnector_UploadIndicatorsAPI.json","true" -"","Group-IB","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Group-IB","","","","","","","","","","","","","","","","","false","","false" -"","HIPAA Compliance","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/HIPAA%20Compliance","azuresentinel","azure-sentinel-solution-hipaacompliance","2025-10-08","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"","HYAS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/HYAS","hyas","a-hyas-insight-azure-sentinel-solutions-gallery","2021-10-20","","","HYAS","Partner","https://www.hyas.com/contact","","domains","","","","","","","false","","false" -"","HYAS Protect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/HYAS%20Protect","hyas","microsoft-sentinel-solution-hyas-protect","2023-09-26","","","HYAS","Partner","https://www.hyas.com/contact","","domains","","","","","","","false","","false" -"HYASProtectDnsSecurityLogs_CL","HYAS Protect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/HYAS%20Protect","hyas","microsoft-sentinel-solution-hyas-protect","2023-09-26","","","HYAS","Partner","https://www.hyas.com/contact","","domains","HYASProtect","HYAS","HYAS Protect","HYAS Protect provide logs based on reputation values - Blocked, Malicious, Permitted, Suspicious.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the HYAS API to pull Logs into Microsoft Sentinel. 
This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the HYAS Protect data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-HYASProtect-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Function Name**, **Table Name**, **Workspace ID**, **Workspace Key**, **API Key**, **TimeInterval**, **FetchBlockedDomains**, **FetchMaliciousDomains**, **FetchSuspiciousDomains**, **FetchPermittedDomains** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the HYAS Protect Logs data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> NOTE: You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-HYASProtect-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. HyasProtectLogsXXX).\n\n\te. **Select a runtime:** Choose Python 3.8.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAPIKey\n\t\tPolling\n\t\tWorkspaceID\n\t\tWorkspaceKey\n. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**HYAS API Key** is required for making API calls.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/HYAS%20Protect/Data%20Connectors/HYASProtect_FunctionApp.json","true" -"","Halcyon","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Halcyon","halcyonransomware","azure-sentinel-solution-halcyon","2025-12-22","2025-12-22","","Halcyon","Partner","https://www.halcyon.ai","","domains","","","","","","","false","","false" -"HalcyonAuthenticationEvents_CL","Halcyon","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Halcyon","halcyonransomware","azure-sentinel-solution-halcyon","2025-12-22","2025-12-22","","Halcyon","Partner","https://www.halcyon.ai","","domains","HalcyonPush","Halcyon","Halcyon Connector","The [Halcyon](https://www.halcyon.ai) connector provides the capability to send data from Halcyon to Microsoft Sentinel.","[{""title"": ""1. Create ARM Resources and Provision Required Permissions"", ""description"": ""This connector reads data from the tables that Halcyon uses in a Microsoft Analytics Workspace, if the data is being forwarded"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated Configuration and Secure Data Ingestion with Entra Application \nClicking on \""Deploy\"" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR). \nIt will then create an Entra application, link the DCR to it, and set the entered secret in the application. This setup enables data to be sent securely to the DCR using an Entra token.""}}, {""parameters"": {""label"": ""Deploy Halcyon Connector Resources"", ""applicationDisplayName"": ""Halcyon Connector Application""}, ""type"": ""DeployPushConnectorButton""}]}, {""title"": ""2. 
Configured your integration in the Halcyon Platform"", ""description"": ""Use the following parameters to configure your integration in the Halcyon Platform."", ""instructions"": [{""parameters"": {""label"": ""Directory ID (Tenant ID)"", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Application ID (Client ID)"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy the Push Connector to get the App Registration Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Secret (Credential Secret) (THIS SECRET WILL NOT BE VISIBLE AFTER LEAVING THIS PAGE)"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy the Push Connector to get the App Registration Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Endpoint (URL)"", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy the Push Connector to get the Data Collection Endpoint""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Rule Immutable ID (Rule ID)"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy the Push Connector to get the Data Collection Rule ID""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace Permissions"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": false}}], ""customs"": [{""name"": ""Microsoft Entra Create Permissions"", ""description"": ""Permissions to create an app registration in Microsoft Entra ID. 
Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Role Assignment Permissions"", ""description"": ""Write permissions required to assign Monitoring Metrics Publisher role to the data collection rule (DCR). Typically requires Owner or User Access Administrator role at the resource group level.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Halcyon/Data%20Connectors/Halcyon_ccp/Halcyon_connectorDefinition.json","true" -"HalcyonDnsActivity_CL","Halcyon","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Halcyon","halcyonransomware","azure-sentinel-solution-halcyon","2025-12-22","2025-12-22","","Halcyon","Partner","https://www.halcyon.ai","","domains","HalcyonPush","Halcyon","Halcyon Connector","The [Halcyon](https://www.halcyon.ai) connector provides the capability to send data from Halcyon to Microsoft Sentinel.","[{""title"": ""1. Create ARM Resources and Provision Required Permissions"", ""description"": ""This connector reads data from the tables that Halcyon uses in a Microsoft Analytics Workspace, if the data is being forwarded"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated Configuration and Secure Data Ingestion with Entra Application \nClicking on \""Deploy\"" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR). \nIt will then create an Entra application, link the DCR to it, and set the entered secret in the application. This setup enables data to be sent securely to the DCR using an Entra token.""}}, {""parameters"": {""label"": ""Deploy Halcyon Connector Resources"", ""applicationDisplayName"": ""Halcyon Connector Application""}, ""type"": ""DeployPushConnectorButton""}]}, {""title"": ""2. 
Configured your integration in the Halcyon Platform"", ""description"": ""Use the following parameters to configure your integration in the Halcyon Platform."", ""instructions"": [{""parameters"": {""label"": ""Directory ID (Tenant ID)"", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Application ID (Client ID)"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy the Push Connector to get the App Registration Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Secret (Credential Secret) (THIS SECRET WILL NOT BE VISIBLE AFTER LEAVING THIS PAGE)"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy the Push Connector to get the App Registration Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Endpoint (URL)"", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy the Push Connector to get the Data Collection Endpoint""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Rule Immutable ID (Rule ID)"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy the Push Connector to get the Data Collection Rule ID""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace Permissions"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": false}}], ""customs"": [{""name"": ""Microsoft Entra Create Permissions"", ""description"": ""Permissions to create an app registration in Microsoft Entra ID. 
Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Role Assignment Permissions"", ""description"": ""Write permissions required to assign Monitoring Metrics Publisher role to the data collection rule (DCR). Typically requires Owner or User Access Administrator role at the resource group level.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Halcyon/Data%20Connectors/Halcyon_ccp/Halcyon_connectorDefinition.json","true" -"HalcyonFileActivity_CL","Halcyon","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Halcyon","halcyonransomware","azure-sentinel-solution-halcyon","2025-12-22","2025-12-22","","Halcyon","Partner","https://www.halcyon.ai","","domains","HalcyonPush","Halcyon","Halcyon Connector","The [Halcyon](https://www.halcyon.ai) connector provides the capability to send data from Halcyon to Microsoft Sentinel.","[{""title"": ""1. Create ARM Resources and Provision Required Permissions"", ""description"": ""This connector reads data from the tables that Halcyon uses in a Microsoft Analytics Workspace, if the data is being forwarded"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated Configuration and Secure Data Ingestion with Entra Application \nClicking on \""Deploy\"" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR). \nIt will then create an Entra application, link the DCR to it, and set the entered secret in the application. This setup enables data to be sent securely to the DCR using an Entra token.""}}, {""parameters"": {""label"": ""Deploy Halcyon Connector Resources"", ""applicationDisplayName"": ""Halcyon Connector Application""}, ""type"": ""DeployPushConnectorButton""}]}, {""title"": ""2. 
Configured your integration in the Halcyon Platform"", ""description"": ""Use the following parameters to configure your integration in the Halcyon Platform."", ""instructions"": [{""parameters"": {""label"": ""Directory ID (Tenant ID)"", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Application ID (Client ID)"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy the Push Connector to get the App Registration Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Secret (Credential Secret) (THIS SECRET WILL NOT BE VISIBLE AFTER LEAVING THIS PAGE)"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy the Push Connector to get the App Registration Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Endpoint (URL)"", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy the Push Connector to get the Data Collection Endpoint""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Rule Immutable ID (Rule ID)"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy the Push Connector to get the Data Collection Rule ID""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace Permissions"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": false}}], ""customs"": [{""name"": ""Microsoft Entra Create Permissions"", ""description"": ""Permissions to create an app registration in Microsoft Entra ID. 
Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Role Assignment Permissions"", ""description"": ""Write permissions required to assign Monitoring Metrics Publisher role to the data collection rule (DCR). Typically requires Owner or User Access Administrator role at the resource group level.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Halcyon/Data%20Connectors/Halcyon_ccp/Halcyon_connectorDefinition.json","true" -"HalcyonNetworkSession_CL","Halcyon","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Halcyon","halcyonransomware","azure-sentinel-solution-halcyon","2025-12-22","2025-12-22","","Halcyon","Partner","https://www.halcyon.ai","","domains","HalcyonPush","Halcyon","Halcyon Connector","The [Halcyon](https://www.halcyon.ai) connector provides the capability to send data from Halcyon to Microsoft Sentinel.","[{""title"": ""1. Create ARM Resources and Provision Required Permissions"", ""description"": ""This connector reads data from the tables that Halcyon uses in a Microsoft Analytics Workspace, if the data is being forwarded"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated Configuration and Secure Data Ingestion with Entra Application \nClicking on \""Deploy\"" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR). \nIt will then create an Entra application, link the DCR to it, and set the entered secret in the application. This setup enables data to be sent securely to the DCR using an Entra token.""}}, {""parameters"": {""label"": ""Deploy Halcyon Connector Resources"", ""applicationDisplayName"": ""Halcyon Connector Application""}, ""type"": ""DeployPushConnectorButton""}]}, {""title"": ""2. 
Configured your integration in the Halcyon Platform"", ""description"": ""Use the following parameters to configure your integration in the Halcyon Platform."", ""instructions"": [{""parameters"": {""label"": ""Directory ID (Tenant ID)"", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Application ID (Client ID)"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy the Push Connector to get the App Registration Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Secret (Credential Secret) (THIS SECRET WILL NOT BE VISIBLE AFTER LEAVING THIS PAGE)"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy the Push Connector to get the App Registration Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Endpoint (URL)"", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy the Push Connector to get the Data Collection Endpoint""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Rule Immutable ID (Rule ID)"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy the Push Connector to get the Data Collection Rule ID""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace Permissions"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": false}}], ""customs"": [{""name"": ""Microsoft Entra Create Permissions"", ""description"": ""Permissions to create an app registration in Microsoft Entra ID. 
Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Role Assignment Permissions"", ""description"": ""Write permissions required to assign Monitoring Metrics Publisher role to the data collection rule (DCR). Typically requires Owner or User Access Administrator role at the resource group level.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Halcyon/Data%20Connectors/Halcyon_ccp/Halcyon_connectorDefinition.json","true" -"HalcyonProcessEvent_CL","Halcyon","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Halcyon","halcyonransomware","azure-sentinel-solution-halcyon","2025-12-22","2025-12-22","","Halcyon","Partner","https://www.halcyon.ai","","domains","HalcyonPush","Halcyon","Halcyon Connector","The [Halcyon](https://www.halcyon.ai) connector provides the capability to send data from Halcyon to Microsoft Sentinel.","[{""title"": ""1. Create ARM Resources and Provision Required Permissions"", ""description"": ""This connector reads data from the tables that Halcyon uses in a Microsoft Analytics Workspace, if the data is being forwarded"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated Configuration and Secure Data Ingestion with Entra Application \nClicking on \""Deploy\"" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR). \nIt will then create an Entra application, link the DCR to it, and set the entered secret in the application. This setup enables data to be sent securely to the DCR using an Entra token.""}}, {""parameters"": {""label"": ""Deploy Halcyon Connector Resources"", ""applicationDisplayName"": ""Halcyon Connector Application""}, ""type"": ""DeployPushConnectorButton""}]}, {""title"": ""2. 
Configured your integration in the Halcyon Platform"", ""description"": ""Use the following parameters to configure your integration in the Halcyon Platform."", ""instructions"": [{""parameters"": {""label"": ""Directory ID (Tenant ID)"", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Application ID (Client ID)"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy the Push Connector to get the App Registration Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Secret (Credential Secret) (THIS SECRET WILL NOT BE VISIBLE AFTER LEAVING THIS PAGE)"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy the Push Connector to get the App Registration Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Endpoint (URL)"", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy the Push Connector to get the Data Collection Endpoint""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Rule Immutable ID (Rule ID)"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy the Push Connector to get the Data Collection Rule ID""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace Permissions"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": false}}], ""customs"": [{""name"": ""Microsoft Entra Create Permissions"", ""description"": ""Permissions to create an app registration in Microsoft Entra ID. 
Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Role Assignment Permissions"", ""description"": ""Write permissions required to assign Monitoring Metrics Publisher role to the data collection rule (DCR). Typically requires Owner or User Access Administrator role at the resource group level.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Halcyon/Data%20Connectors/Halcyon_ccp/Halcyon_connectorDefinition.json","true" -"","HolmSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/HolmSecurity","holmsecurityswedenab1639511288603","holmsecurity_sc_sentinel","2022-07-18","","","Holm Security","Partner","https://support.holmsecurity.com/","","domains","","","","","","","false","","false" -"net_assets_CL","HolmSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/HolmSecurity","holmsecurityswedenab1639511288603","holmsecurity_sc_sentinel","2022-07-18","","","Holm Security","Partner","https://support.holmsecurity.com/","","domains","HolmSecurityAssets","Holm Security","Holm Security Asset Data","The connector provides the capability to poll data from Holm Security Center into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Holm Security Assets to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Holm Security API**\n\n [Follow these instructions](https://support.holmsecurity.com/knowledge/how-do-i-set-up-an-api-token) to create an API authentication token.""}, {""title"": """", ""description"": ""**STEP 2 - Use the below deployment option to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Holm Security connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Holm Security API authorization Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template Deployment"", ""description"": ""**Option 1 - Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the Holm Security connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-holmsecurityassets-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password**, 'and/or Other required fields'. \n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. 
\n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Holm Security API Token"", ""description"": ""Holm Security API Token is required. [Holm Security API Token](https://support.holmsecurity.com/)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/HolmSecurity/Data%20Connectors/HolmSecurityAssets_API_FunctionApp.json","true" -"web_assets_CL","HolmSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/HolmSecurity","holmsecurityswedenab1639511288603","holmsecurity_sc_sentinel","2022-07-18","","","Holm Security","Partner","https://support.holmsecurity.com/","","domains","HolmSecurityAssets","Holm Security","Holm Security Asset Data","The connector provides the capability to poll data from Holm Security Center into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Holm Security Assets to pull its logs into Microsoft Sentinel. 
This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Holm Security API**\n\n [Follow these instructions](https://support.holmsecurity.com/knowledge/how-do-i-set-up-an-api-token) to create an API authentication token.""}, {""title"": """", ""description"": ""**STEP 2 - Use the below deployment option to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Holm Security connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Holm Security API authorization Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template Deployment"", ""description"": ""**Option 1 - Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the Holm Security connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-holmsecurityassets-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password**, 'and/or Other required fields'. 
\n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Holm Security API Token"", ""description"": ""Holm Security API Token is required. 
[Holm Security API Token](https://support.holmsecurity.com/)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/HolmSecurity/Data%20Connectors/HolmSecurityAssets_API_FunctionApp.json","true" -"","HoneyTokens","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/HoneyTokens","","","","","","","","","","","","","","","","","false","","false" -"","IONIX","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IONIX","cyberpion1597832716616","cyberpion_mss","2022-05-02","","","IONIX","Partner","https://www.ionix.io/contact-us/","","domains","","","","","","","false","","false" -"CyberpionActionItems_CL","IONIX","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IONIX","cyberpion1597832716616","cyberpion_mss","2022-05-02","","","IONIX","Partner","https://www.ionix.io/contact-us/","","domains","CyberpionSecurityLogs","IONIX","IONIX Security Logs","The IONIX Security Logs data connector, ingests logs from the IONIX system directly into Sentinel. 
The connector allows users to visualize their data, create alerts and incidents and improve security investigations.","[{""title"": """", ""description"": ""Follow the [instructions](https://www.ionix.io/integrations/azure-sentinel/) to integrate IONIX Security Alerts into Sentinel."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""IONIX Subscription"", ""description"": ""a subscription and account is required for IONIX logs. 
[One can be acquired here.](https://azuremarketplace.microsoft.com/en/marketplace/apps/cyberpion1597832716616.cyberpion)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IONIX/Data%20Connectors/IONIXSecurityLogs.json","true" -"","IPQualityScore","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPQualityScore","ipqualityscorellc1632794263588","ipqs_1","2021-10-20","","","IPQS Plugins Team","Partner","https://www.ipqualityscore.com/contact-us","","domains","","","","","","","false","","false" -"","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","","","","","","","false","","false" -"Ipinfo_ASN_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoASNDataConnector","IPinfo","IPinfo ASN Data Connector","This IPinfo data connector installs an Azure Function app to download standard_ASN datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. 
Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-ASN-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-ASN-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. 
After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/ASN/IPinfo_ASN_API_AzureFunctionApp.json","true" -"Ipinfo_Abuse_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoAbuseDataConnector","IPinfo","IPinfo Abuse Data Connector","This IPinfo data connector installs an Azure Function app to download standard_abuse datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-Abuse-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-Abuse-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/Abuse/IPinfo_Abuse_API_AzureFunctionApp.json","true" -"Ipinfo_Carrier_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoCarrierDataConnector","IPinfo","IPinfo Carrier Data Connector","This IPinfo data connector installs an Azure Function app to download standard_carrier datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-Carrier-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-Carrier-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/Carrier/IPinfo_Carrier_API_AzureFunctionApp.json","true" -"Ipinfo_Company_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoCompanyDataConnector","IPinfo","IPinfo Company Data Connector","This IPinfo data connector installs an Azure Function app to download standard_company datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-Company-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-Company-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/Company/IPinfo_Company_API_AzureFunctionApp.json","true" -"Ipinfo_Country_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoCountryDataConnector","IPinfo","IPinfo Country ASN Data Connector","This IPinfo data connector installs an Azure Function app to download country_asn datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-Country-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-Country-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/Country%20ASN/IPinfo_Country_API_AzureFunctionApp.json","true" -"Ipinfo_Domain_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoDomainDataConnector","IPinfo","IPinfo Domain Data Connector","This IPinfo data connector installs an Azure Function app to download standard_domain datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-Domain-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-Domain-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/Domain/IPinfo_Domain_API_AzureFunctionApp.json","true" -"Ipinfo_Location_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoIplocationDataConnector","IPinfo","IPinfo Iplocation Data Connector","This IPinfo data connector installs an Azure Function app to download standard_location datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-Iplocation-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-Iplocation-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/Iplocation/IPinfo_Iplocation_API_AzureFunctionApp.json","true" -"Ipinfo_Location_extended_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoIplocationExtendedDataConnector","IPinfo","IPinfo Iplocation Extended Data Connector","This IPinfo data connector installs an Azure Function app to download standard_location_extended datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-Iplocation-Extended-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-Iplocation-Extended-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/Iplocation%20Extended/IPinfo_Iplocation_Extended_API_AzureFunctionApp.json","true" -"Ipinfo_Privacy_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoPrivacyDataConnector","IPinfo","IPinfo Privacy Data Connector","This IPinfo data connector installs an Azure Function app to download standard_privacy datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-Privacy-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-Privacy-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/Privacy/IPinfo_Privacy_API_AzureFunctionApp.json","true" -"Ipinfo_Privacy_extended_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoPrivacyExtendedDataConnector","IPinfo","IPinfo Privacy Extended Data Connector","This IPinfo data connector installs an Azure Function app to download standard_privacy datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-Privacy-Extended-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-Privacy-Extended-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/Privacy%20Extended/IPinfo_Privacy_Extended_API_AzureFunctionApp.json","true" -"Ipinfo_RIRWHOIS_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoRIRWHOISDataConnector","IPinfo","IPinfo RIRWHOIS Data Connector","This IPinfo data connector installs an Azure Function app to download RIRWHOIS datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-RIRWHOIS-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-RIRWHOIS-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/RIRWHOIS/IPinfo_RIRWHOIS_API_AzureFunctionApp.json","true" -"Ipinfo_RWHOIS_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoRWHOISDataConnector","IPinfo","IPinfo RWHOIS Data Connector","This IPinfo data connector installs an Azure Function app to download RWHOIS datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-RWHOIS-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-RWHOIS-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/RWHOIS/IPinfo_RWHOIS_API_AzureFunctionApp.json","true" -"Ipinfo_WHOIS_ASN_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoWHOISASNDataConnector","IPinfo","IPinfo WHOIS ASN Data Connector","This IPinfo data connector installs an Azure Function app to download WHOIS_ASN datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-WHOIS-ASN-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-WHOIS-ASN-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/WHOIS%20ASN/IPinfo_WHOIS_ASN_API_AzureFunctionApp.json","true" -"Ipinfo_WHOIS_MNT_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoWHOISMNTDataConnector","IPinfo","IPinfo WHOIS MNT Data Connector","This IPinfo data connector installs an Azure Function app to download WHOIS_MNT datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-WHOIS-MNT-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-WHOIS-MNT-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/WHOIS%20MNT/IPinfo_WHOIS_MNT_API_AzureFunctionApp.json","true" -"Ipinfo_WHOIS_NET_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoWHOISNETDataConnector","IPinfo","IPinfo WHOIS NET Data Connector","This IPinfo data connector installs an Azure Function app to download WHOIS_NET datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-WHOIS-NET-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-WHOIS-NET-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/WHOIS%20NET/IPinfo_WHOIS_NET_API_AzureFunctionApp.json","true" -"Ipinfo_WHOIS_ORG_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoWHOISORGDataConnector","IPinfo","IPinfo WHOIS ORG Data Connector","This IPinfo data connector installs an Azure Function app to download WHOIS_ORG datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-WHOIS-ORG-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-WHOIS-ORG-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/WHOIS%20ORG/IPinfo_WHOIS_ORG_API_AzureFunctionApp.json","true" -"Ipinfo_WHOIS_POC_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoWHOISPOCDataConnector","IPinfo","IPinfo WHOIS POC Data Connector","This IPinfo data connector installs an Azure Function app to download WHOIS_POC datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-WHOIS-POC-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-WHOIS-POC-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/WHOIS%20POC/IPinfo_WHOIS_POC_API_AzureFunctionApp.json","true" -"","ISC Bind","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ISC%20Bind","azuresentinel","azure-sentinel-solution-iscbind","2022-09-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"Syslog","ISC Bind","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ISC%20Bind","azuresentinel","azure-sentinel-solution-iscbind","2022-09-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ISCBind","ISC","[Deprecated] ISC Bind","The [ISC Bind](https://www.isc.org/bind/) connector allows you to easily connect your ISC Bind logs with Microsoft Sentinel. This gives you more insight into your organization's network traffic data, DNS query data, traffic statistics and improves your security operation capabilities.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias ISCBind and load the function code or click [here](https://aka.ms/sentinel-iscbind-parser).The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n 2. Select **Apply below configuration to my machines** and select the facilities and severities.\n 3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Configure and connect the ISC Bind"", ""description"": ""1. Follow these instructions to configure the ISC Bind to forward syslog: \n - [DNS Logs](https://kb.isc.org/docs/aa-01526) \n2. Configure Syslog to send the Syslog traffic to Agent. 
Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""ISC Bind"", ""description"": ""must be configured to export logs via Syslog""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ISC%20Bind/Data%20Connectors/Connector_Syslog_ISCBind.json","true" -"","Illumio Core","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illumio%20Core","azuresentinel","azure-sentinel-solution-illumiocore","2022-05-26","","","Microsoft","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"CommonSecurityLog","Illumio Core","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illumio%20Core","azuresentinel","azure-sentinel-solution-illumiocore","2022-05-26","","","Microsoft","Microsoft","https://support.microsoft.com","","domains","IllumioCore","Illumio","[Deprecated] Illumio Core via Legacy Agent","The [Illumio Core](https://www.illumio.com/products/) data connector provides the capability to ingest Illumio Core logs into Microsoft Sentinel.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias IllumioCoreEvent and load the function code or click [here](https://aka.ms/sentinel-IllumioCore-parser).The function usually takes 10-15 minutes to activate after solution installation/update and maps Illumio Core events to Microsoft Sentinel Information Model (ASIM).""}, {""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Configure Ilumio Core to send logs using CEF"", ""description"": ""2.1 Configure Event Format\n\n 1. From the PCE web console menu, choose **Settings > Event Settings** to view your current settings.\n\n 2. Click **Edit** to change the settings.\n\n 3. Set **Event Format** to CEF.\n\n 4. (Optional) Configure **Event Severity** and **Retention Period**.\n\n2.2 Configure event forwarding to an external syslog server\n\n 1. From the PCE web console menu, choose **Settings > Event Settings**.\n\n 2. Click **Add**.\n\n 3. 
Click **Add Repository**.\n\n 4. Complete the **Add Repository** dialog.\n\n 5. Click **OK** to save the event forwarding configuration.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illumio%20Core/Data%20Connectors/Connector_IllumioCore_CEF.json","true" -"CommonSecurityLog","Illumio Core","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illumio%20Core","azuresentinel","azure-sentinel-solution-illumiocore","2022-05-26","","","Microsoft","Microsoft","https://support.microsoft.com","","domains","IllumioCoreAma","Illumio","[Deprecated] Illumio Core via AMA","The [Illumio Core](https://www.illumio.com/products/) data connector provides the capability to ingest Illumio Core logs into Microsoft Sentinel.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias IllumioCoreEvent and load the function code or click [here](https://aka.ms/sentinel-IllumioCore-parser).The function usually takes 10-15 minutes to activate after solution installation/update and maps Illumio Core events to Microsoft Sentinel Information Model (ASIM)."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine.""}, {""title"": ""Step B. Configure Ilumio Core to send logs using CEF"", ""description"": ""Configure Event Format\n\n 1. From the PCE web console menu, choose **Settings > Event Settings** to view your current settings.\n\n 2. Click **Edit** to change the settings.\n\n 3. Set **Event Format** to CEF.\n\n 4. (Optional) Configure **Event Severity** and **Retention Period**.\n\nConfigure event forwarding to an external syslog server\n\n 1. From the PCE web console menu, choose **Settings > Event Settings**.\n\n 2. Click **Add**.\n\n 3. Click **Add Repository**.\n\n 4. Complete the **Add Repository** dialog.\n\n 5. Click **OK** to save the event forwarding configuration.""}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illumio%20Core/Data%20Connectors/template_IllumioCoreAMA.json","true" -"","Illumio Insight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illumio%20Insight","illumioinc1629822633689","azure-sentinel-solution-illumioinsight","2025-08-10","","","Illumio","Partner","https://www.illumio.com/support/support","","domains","","","","","","","false","","false" -"IllumioInsights_CL","Illumio Insight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illumio%20Insight","illumioinc1629822633689","azure-sentinel-solution-illumioinsight","2025-08-10","","","Illumio","Partner","https://www.illumio.com/support/support","","domains","IllumioInsightsDefinition","Microsoft","Illumio Insights","Illumio Insights Connector sends workload and security graph data from Illumio Insights into the Azure Microsoft Sentinel Data Lake, providing deep context for threat detection, lateral movement analysis, and real-time investigation.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Illumio Insights Connector\n\n**Prerequisites**\n- Register and Login to Illumio Console with valid credentials\n- Purchase Illumio Insights or Start a free Trial for Illumio Insights\n\n**Step 1: Register the Service Account**\n1. Go to **Illumio Console \u2192 Access \u2192 Service Accounts**\n2. Create a service account for the tenant\n3. Once you create a service account, you will receive the client credentials\n4. 
Copy the **auth_username** (Illumio Insights API Key) and the **Secret** (API Secret)\n\n**Step 2: Add Client Credentials to Sentinel Account**\n- Add the API key and secret to Sentinel Account for tenant authentication\n- These credentials will be used to authenticate calls to the Illumio SaaS API\n\nPlease fill in the required fields below with the credentials obtained from the Illumio Console:""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Illumio Insights Api Key"", ""placeholder"": ""api_XXXXXX"", ""type"": ""password"", ""name"": ""apiKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Api Secret"", ""placeholder"": ""API Secret"", ""type"": ""password"", ""name"": ""apiToken""}}, {""parameters"": {""label"": ""Illumio Tenant Id"", ""placeholder"": ""{IllumioTenantId - Optional}"", ""type"": ""text"", ""name"": ""illumioTenantId""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illumio%20Insight/Data%20Connectors/IllumioInsight_CCP/IllumioInsight_Definition.json","true" -"IllumioInsightsSummary_CL","Illumio Insight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illumio%20Insight","illumioinc1629822633689","azure-sentinel-solution-illumioinsight","2025-08-10","","","Illumio","Partner","https://www.illumio.com/support/support","","domains","IllumioInsightsSummaryCCP","Illumio","Illumio Insights Summary","The Illumio Insights Summary connector Publishes AI-powered threat discovery and anomaly reports generated by the Illumio Insights Agent. 
Leveraging the MITRE ATT&CK framework, these reports surface high-fidelity insights into emerging threats and risky behaviors, directly into the Data Lake.","[{""title"": ""1. Configuration"", ""description"": ""Configure the Illumio Insights Summary connector."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""> This data connector may take 24 hrs for the latest report after onboarding""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Illumio Insights Summary Connector\n\n**Prerequisites**\n- Register and Login to Illumio Console with valid credentials\n- Purchase Illumio Insights or Start a free Trial for Illumio Insights\n- Enable The Illumio Insights Agent\n\n**Step 1: Register the Service Account**\n1. Go to **Illumio Console \u2192 Access \u2192 Service Accounts**\n2. Create a service account for the tenant\n3. Once you create a service account, you will receive the client credentials\n4. Copy the **auth_username** (Illumio Insights API Key) and the **Secret** (API Secret)\n\n**Step 2: Add Client Credentials to Sentinel Account**\n- Add the API key and secret to Sentinel Account for tenant authentication\n- These credentials will be used to authenticate calls to the Illumio SaaS API \n\nPlease fill in the required fields below with the credentials obtained from the Illumio Console:""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Illumio Insights Api Key"", ""placeholder"": ""api_XXXXXX"", ""type"": ""password"", ""name"": ""apiKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Api Secret"", ""placeholder"": ""API Secret"", ""type"": ""password"", ""name"": ""apiToken""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Illumio Tenant ID"", ""placeholder"": ""{IllumioTenantId - Optional}"", ""type"": ""text"", ""name"": ""illumioTenantId""}}]}, {""title"": ""2. 
Connect"", ""description"": ""Enable the Illumio Insights Summary connector."", ""instructions"": [{""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""disconnectLabel"": ""Disconnect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illumio%20Insight/Data%20Connectors/IllumioInsightsSummaryConnector_CCP/IllumioInsightsSummary_ConnectorDefinition.json","true" -"","IllumioSaaS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IllumioSaaS","illumioinc1629822633689","illumio_sentinel","2024-05-13","","","Illumio","Partner","https://www.illumio.com/support/support","","domains","","","","","","","false","","false" -"Illumio_Auditable_Events_CL","IllumioSaaS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IllumioSaaS","illumioinc1629822633689","illumio_sentinel","2024-05-13","","","Illumio","Partner","https://www.illumio.com/support/support","","domains","IllumioSaaSDataConnector","Illumio","Illumio SaaS","[Illumio](https://www.illumio.com/) connector provides the capability to ingest events into Microsoft Sentinel. The connector provides ability to ingest auditable and flow events from AWS S3 bucket.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. 
Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Ensure AWS SQS is configured for the s3 bucket from which flow and auditable event logs are going to be pulled. In case, Illumio provides bucket, please contact Illumio support for sqs url, s3 bucket name and aws credentials. \n 2. Register AAD application - For DCR (Data collection rule) to authentiate to ingest data into log analytics, you must use Entra application. 1. [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. \n 2. Ensure you have created a log analytics workspace. \nPlease keep note of the name and region where it has been deployed.""}, {""title"": ""Deployment"", ""description"": ""Choose one of the approaches from below options. Either use the below ARM template to deploy azure resources or deploy function app manually.""}, {""title"": ""1. Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of Azure resources using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IllumioSaaS-FunctionApp) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""2. 
Deploy additional function apps to handle scale"", ""description"": ""Use this method for automated deployment of additional function apps using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IllumioSaaS-QueueTriggerFunctionApp) \t\t\t\n""}, {""title"": ""3. Manual Deployment of Azure Functions"", ""description"": ""Deployment via Visual Studio Code.""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n1. Download the [Azure Function App](https://github.com/Azure/Azure-Sentinel/raw/master/Solutions/IllumioSaaS/Data%20Connectors/IllumioEventsConn.zip) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. Follow documentation to set up all required environment variables and click **Save**. Ensure you restart the function app once settings are saved.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](). If you are using s3 bucket provided by Illumio, contact Illumio support. At your request they will provide you with the AWS S3 bucket name, AWS SQS url and AWS credentials to access them.""}, {""name"": ""Illumio API key and secret"", ""description"": ""**ILLUMIO_API_KEY**, **ILLUMIO_API_SECRET** is required for a workbook to make connection to SaaS PCE and fetch api responses.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IllumioSaaS/Data%20Connectors/IllumioSaaS_FunctionApp.json","true" -"Illumio_Flow_Events_CL","IllumioSaaS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IllumioSaaS","illumioinc1629822633689","illumio_sentinel","2024-05-13","","","Illumio","Partner","https://www.illumio.com/support/support","","domains","IllumioSaaSDataConnector","Illumio","Illumio SaaS","[Illumio](https://www.illumio.com/) connector provides the capability to ingest events into Microsoft Sentinel. The connector provides ability to ingest auditable and flow events from AWS S3 bucket.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. 
This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Ensure AWS SQS is configured for the s3 bucket from which flow and auditable event logs are going to be pulled. In case, Illumio provides bucket, please contact Illumio support for sqs url, s3 bucket name and aws credentials. \n 2. Register AAD application - For DCR (Data collection rule) to authentiate to ingest data into log analytics, you must use Entra application. 1. [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. \n 2. Ensure you have created a log analytics workspace. \nPlease keep note of the name and region where it has been deployed.""}, {""title"": ""Deployment"", ""description"": ""Choose one of the approaches from below options. Either use the below ARM template to deploy azure resources or deploy function app manually.""}, {""title"": ""1. Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of Azure resources using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IllumioSaaS-FunctionApp) \t\t\t\n2. 
Provide the required details such as Microsoft Sentinel Workspace, AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""2. Deploy additional function apps to handle scale"", ""description"": ""Use this method for automated deployment of additional function apps using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IllumioSaaS-QueueTriggerFunctionApp) \t\t\t\n""}, {""title"": ""3. Manual Deployment of Azure Functions"", ""description"": ""Deployment via Visual Studio Code.""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n1. Download the [Azure Function App](https://github.com/Azure/Azure-Sentinel/raw/master/Solutions/IllumioSaaS/Data%20Connectors/IllumioEventsConn.zip) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. Follow documentation to set up all required environment variables and click **Save**. 
Ensure you restart the function app once settings are saved.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](). If you are using s3 bucket provided by Illumio, contact Illumio support. 
At your request they will provide you with the AWS S3 bucket name, AWS SQS url and AWS credentials to access them.""}, {""name"": ""Illumio API key and secret"", ""description"": ""**ILLUMIO_API_KEY**, **ILLUMIO_API_SECRET** is required for a workbook to make connection to SaaS PCE and fetch api responses.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IllumioSaaS/Data%20Connectors/IllumioSaaS_FunctionApp.json","true" -"IllumioFlowEventsV2_CL","IllumioSaaS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IllumioSaaS","illumioinc1629822633689","illumio_sentinel","2024-05-13","","","Illumio","Partner","https://www.illumio.com/support/support","","domains","IllumioSaasCCFDefinition","Microsoft","Illumio Saas","The Illumio Saas Cloud data connector provides the capability to ingest Flow logs into Microsoft Sentinel using the Illumio Saas Log Integration through AWS S3 Bucket. Refer to [Illumio Saas Log Integration](https://product-docs-repo.illumio.com/Tech-Docs/CloudSecure/out/en/administer-cloudsecure/connector.html#UUID-c14edaab-9726-1f23-9c4c-bc2937be39ee_section-idm234556433515698) for more information.","[{""title"": ""Connect Illumio Saas to Microsoft Sentinel\n\n"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** This connector fetches the Illumio Saas Flow logs from AWS S3 bucket""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Illumio, you need to configure the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. AWS Role ARN \n To gather data from Illumio, you'll need AWS Role ARN.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
AWS SQS Queue URL \n To gather data from Illumio, you'll need AWS SQS Queue URL.\n\n""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""For detailed steps to retrieve the AWS Role ARN, SQS Queue URL, and configure Illumio log forwarding to the Amazon S3 bucket, refer to the [Connector Setup Guide](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IllumioSaaS/Data%20Connectors/IllumioSaasLogs_ccf/Readme.md).""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""AWS Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""AWS SQS Queue URL""}, {""columnValue"": ""properties.destinationTable"", ""columnName"": ""Table Name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""placeholder"": ""Enter Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Flow Log Queue URL"", ""placeholder"": ""Enter Flow log SQS Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""required"": true}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IllumioSaaS/Data%20Connectors/IllumioSaasLogs_ccf/IllumioSaasLogs_ConnectorDefinition.json","true" -"","Illusive Active 
Defense","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illusive%20Active%20Defense","","","","","","","","","","","","","","","","","false","","false" -"","Illusive Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illusive%20Platform","illusivenetworks","illusive_platform_mss","2022-05-25","","","Illusive Networks","Partner","https://illusive.com/support","","domains","","","","","","","false","","false" -"CommonSecurityLog","Illusive Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illusive%20Platform","illusivenetworks","illusive_platform_mss","2022-05-25","","","Illusive Networks","Partner","https://illusive.com/support","","domains","illusiveAttackManagementSystem","illusive","[Deprecated] Illusive Platform via Legacy Agent","The Illusive Platform Connector allows you to share Illusive's attack surface analysis data and incident logs with Microsoft Sentinel and view this information in dedicated dashboards that offer insight into your organization's attack surface risk (ASM Dashboard) and track unauthorized lateral movement in your organization's network (ADS Dashboard).","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Illusive Common Event Format (CEF) logs to Syslog agent"", ""description"": ""1. Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address.\n> 2. Log onto the Illusive Console, and navigate to Settings->Reporting.\n> 3. Find Syslog Servers\n> 4. Supply the following information:\n>> 1. Host name: Linux Syslog agent IP address or FQDN host name\n>> 2. Port: 514\n>> 3. Protocol: TCP\n>> 4. 
Audit messages: Send audit messages to server\n> 5. To add the syslog server, click Add.\n> 6. For more information about how to add a new syslog server in the Illusive platform, please find the Illusive Networks Admin Guide in here: https://support.illusivenetworks.com/hc/en-us/sections/360002292119-Documentation-by-Version""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illusive%20Platform/Data%20Connectors/illusive%20Attack%20Management%20System.json","true" -"CommonSecurityLog","Illusive Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illusive%20Platform","illusivenetworks","illusive_platform_mss","2022-05-25","","","Illusive Networks","Partner","https://illusive.com/support","","domains","illusiveAttackManagementSystemAma","illusive","[Deprecated] Illusive Platform via AMA","The Illusive Platform Connector allows you to share Illusive's attack surface analysis data and incident logs with Microsoft Sentinel and view this information in dedicated dashboards that offer insight into your organization's attack surface risk (ASM Dashboard) and track unauthorized lateral movement in your organization's network (ADS Dashboard).","[{""title"": """", ""description"": """", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine""}, {""title"": ""Step B. Forward Illusive Common Event Format (CEF) logs to Syslog agent"", ""description"": ""1. Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address.\n> 2. Log onto the Illusive Console, and navigate to Settings->Reporting.\n> 3. Find Syslog Servers\n> 4. Supply the following information:\n>> 1. Host name: Linux Syslog agent IP address or FQDN host name\n>> 2. Port: 514\n>> 3. Protocol: TCP\n>> 4. Audit messages: Send audit messages to server\n> 5. To add the syslog server, click Add.\n> 6. For more information about how to add a new syslog server in the Illusive platform, please find the Illusive Networks Admin Guide in here: https://support.illusivenetworks.com/hc/en-us/sections/360002292119-Documentation-by-Version""}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illusive%20Platform/Data%20Connectors/template_IllusivePlatformAMA.json","true" -"","Images","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Images","","","","","","","","","","","","","","","","","false","","false" -"","Imperva WAF Gateway","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Imperva%20WAF%20Gateway","imperva","Imperva_WAF_Gateway_MSS","2022-05-02","","","Imperva","Partner","https://www.imperva.com/support/technical-support/","","domains","","","","","","","false","","false" -"CommonSecurityLog","Imperva WAF Gateway","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Imperva%20WAF%20Gateway","imperva","Imperva_WAF_Gateway_MSS","2022-05-02","","","Imperva","Partner","https://www.imperva.com/support/technical-support/","","domains","ImpervaWAFGateway","Imperva","Imperva WAF Gateway","The [Imperva](https://www.imperva.com) connector will allow you to quickly connect your Imperva WAF Gateway alerts to Azure Sentinel. This provides you additional insight into your organization's WAF traffic and improves your security operation capabilities.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Azure Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Azure Sentinel will use as the proxy between your security solution and Azure Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Azure Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""3. SecureSphere MX Configuration"", ""description"": ""This connector requires an Action Interface and Action Set to be created on the Imperva SecureSphere MX. 
[Follow the steps](https://community.imperva.com/blogs/craig-burlingame1/2020/11/13/steps-for-enabling-imperva-waf-gateway-alert) to create the requirements."", ""innerSteps"": [{""title"": ""3.1 Create the Action Interface"", ""description"": ""Create a new Action Interface that contains the required parameters to send WAF alerts to Azure Sentinel.""}, {""title"": ""3.2 Create the Action Set "", ""description"": ""Create a new Action Set that uses the Action Interface configured.""}, {""title"": ""3.3 Apply the Action Set"", ""description"": ""Apply the Action Set to any Security Policies you wish to have alerts for sent to Azure Sentinel.""}]}, {""title"": ""4. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n"", ""innerSteps"": [{""title"": ""4.1 Check for logs in the past 5 minutes using the following command.\n\nCommonSecurityLog | where DeviceVendor == \""Imperva Inc.\"" | where DeviceProduct == \""WAF Gateway\"" | where TimeGenerated == ago(5m)""}]}, {""title"": ""5. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Imperva%20WAF%20Gateway/Data%20Connectors/Connector_Imperva_WAF_Gateway.json","true" -"","ImpervaCloudWAF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ImpervaCloudWAF","azuresentinel","azure-sentinel-solution-impervawafcloud","2021-09-28","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"ImpervaWAFCloudV2_CL","ImpervaCloudWAF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ImpervaCloudWAF","azuresentinel","azure-sentinel-solution-impervawafcloud","2021-09-28","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ImpervaCloudWAFLogsCCFDefinition","Microsoft","Imperva Cloud WAF","The Imperva WAF Cloud data connector provides the capability to ingest logs into Microsoft Sentinel using the Imperva Log Integration through AWS S3 Bucket. Refer to [Imperva WAF Cloud Log Integration](https://docs.imperva.com/bundle/cloud-application-security/page/settings/log-integration.htm) for more information.","[{""title"": ""Connect Imperva WAF Cloud to Microsoft Sentinel\n\n"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** This connector fetches the Imperva Cloud WAF logs from AWS S3 bucket""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Imperva, you need to configure the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. AWS Role ARN \n To gather data from Imperva, you'll need AWS Role ARN.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
AWS SQS Queue URL \n To gather data from Imperva, you'll need AWS SQS Queue URL.\n\n""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""For detailed steps to retrieve the AWS Role ARN, SQS Queue URL, and configure Imperva log forwarding to the Amazon S3 bucket, refer to the [Connector Setup Guide](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ImpervaCloudWAF/Data%20Connectors/Readme.md).""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""AWS Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""AWS SQS Queue URL""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""placeholder"": ""Enter Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""placeholder"": ""Enter SQL Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""required"": true}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ImpervaCloudWAF/Data%20Connectors/ImpervaCloudWAFLogs_ccf/ImpervaCloudWAFLogs_ConnectorDefinition.json","true" 
-"ImpervaWAFCloudV2_CL","ImpervaCloudWAF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ImpervaCloudWAF","azuresentinel","azure-sentinel-solution-impervawafcloud","2021-09-28","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ImpervaWAFCloudAPI","Imperva","Imperva Cloud WAF","The [Imperva Cloud WAF](https://www.imperva.com/resources/resource-library/datasheets/imperva-cloud-waf/) data connector provides the capability to integrate and ingest Web Application Firewall events into Microsoft Sentinel through the REST API. Refer to Log integration [documentation](https://docs.imperva.com/bundle/cloud-application-security/page/settings/log-integration.htm#Download) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Imperva Cloud API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""description"": "">**NOTE:**This data connector depends on a parser based on a Kusto Function to work as expected [**ImpervaWAFCloud**](https://aka.ms/sentinel-impervawafcloud-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Log Integration**\n\n [Follow the instructions](https://docs.imperva.com/bundle/cloud-application-security/page/settings/log-integration.htm#Setuplogintegration) to obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Imperva Cloud WAF data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-impervawafcloud-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **ImpervaAPIID**, **ImpervaAPIKey**, **ImpervaLogServerURI** and deploy. \n4. 
Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Imperva Cloud WAF data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-impervawafcloud-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ImpervaCloudXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. 
Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tImpervaAPIID\n\t\tImpervaAPIKey\n\t\tImpervaLogServerURI\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**ImpervaAPIID**, **ImpervaAPIKey**, **ImpervaLogServerURI** are required for the API. [See the documentation to learn more about Setup Log Integration process](https://docs.imperva.com/bundle/cloud-application-security/page/settings/log-integration.htm#Setuplogintegration). Check all [requirements and follow the instructions](https://docs.imperva.com/bundle/cloud-application-security/page/settings/log-integration.htm#Setuplogintegration) for obtaining credentials. Please note that this connector uses CEF log event format. [More information](https://docs.imperva.com/bundle/cloud-application-security/page/more/log-file-structure.htm#Logfilestructure) about log format.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ImpervaCloudWAF/Data%20Connectors/ImpervaWAFCloud_FunctionApp.json","true" -"ImpervaWAFCloud_CL","ImpervaCloudWAF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ImpervaCloudWAF","azuresentinel","azure-sentinel-solution-impervawafcloud","2021-09-28","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ImpervaWAFCloudAPI","Imperva","Imperva Cloud WAF","The [Imperva Cloud WAF](https://www.imperva.com/resources/resource-library/datasheets/imperva-cloud-waf/) data connector provides the capability to integrate and ingest Web Application Firewall events into Microsoft Sentinel through the REST API. Refer to Log integration [documentation](https://docs.imperva.com/bundle/cloud-application-security/page/settings/log-integration.htm#Download) for more information. 
The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Imperva Cloud API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""description"": "">**NOTE:**This data connector depends on a parser based on a Kusto Function to work as expected [**ImpervaWAFCloud**](https://aka.ms/sentinel-impervawafcloud-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Log Integration**\n\n [Follow the instructions](https://docs.imperva.com/bundle/cloud-application-security/page/settings/log-integration.htm#Setuplogintegration) to obtain the credentials. 
\n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Imperva Cloud WAF data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-impervawafcloud-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **ImpervaAPIID**, **ImpervaAPIKey**, **ImpervaLogServerURI** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Imperva Cloud WAF data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-impervawafcloud-functionapp) file. 
Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ImpervaCloudXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tImpervaAPIID\n\t\tImpervaAPIKey\n\t\tImpervaLogServerURI\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**ImpervaAPIID**, **ImpervaAPIKey**, **ImpervaLogServerURI** are required for the API. [See the documentation to learn more about Setup Log Integration process](https://docs.imperva.com/bundle/cloud-application-security/page/settings/log-integration.htm#Setuplogintegration). 
Check all [requirements and follow the instructions](https://docs.imperva.com/bundle/cloud-application-security/page/settings/log-integration.htm#Setuplogintegration) for obtaining credentials. Please note that this connector uses CEF log event format. [More information](https://docs.imperva.com/bundle/cloud-application-security/page/more/log-file-structure.htm#Logfilestructure) about log format.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ImpervaCloudWAF/Data%20Connectors/ImpervaWAFCloud_FunctionApp.json","true" -"","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","","","","","","","false","","false" -"CommonSecurityLog","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxCloudDataConnectorAma","Infoblox","[Recommended] Infoblox Cloud Data Connector via AMA","The Infoblox Cloud Data Connector allows you to easily connect your Infoblox data with Microsoft Sentinel. By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""description"": "">**IMPORTANT:** This Microsoft Sentinel data connector assumes an Infoblox Data Connector host has already been created and configured in the Infoblox Cloud Services Portal (CSP). As the [**Infoblox Data Connector**](https://docs.infoblox.com/display/BloxOneThreatDefense/Deploying+the+Data+Connector+Solution) is a feature of Threat Defense, access to an appropriate Threat Defense subscription is required. 
See this [**quick-start guide**](https://www.infoblox.com/wp-content/uploads/infoblox-deployment-guide-data-connector.pdf) for more information and licensing requirements."", ""instructions"": []}, {""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Configure Infoblox to send Syslog data to the Infoblox Cloud Data Connector to forward to the Syslog agent"", ""description"": ""Follow the steps below to configure the Infoblox CDC to send data to Microsoft Sentinel via the Linux Syslog agent.\n1. Navigate to **Manage > Data Connector**.\n2. 
Click the **Destination Configuration** tab at the top.\n3. Click **Create > Syslog**. \n - **Name**: Give the new Destination a meaningful **name**, such as **Microsoft-Sentinel-Destination**.\n - **Description**: Optionally give it a meaningful **description**.\n - **State**: Set the state to **Enabled**.\n - **Format**: Set the format to **CEF**.\n - **FQDN/IP**: Enter the IP address of the Linux device on which the Linux agent is installed.\n - **Port**: Leave the port number at **514**.\n - **Protocol**: Select desired protocol and CA certificate if applicable.\n - Click **Save & Close**.\n4. Click the **Traffic Flow Configuration** tab at the top.\n5. Click **Create**.\n - **Name**: Give the new Traffic Flow a meaningful **name**, such as **Microsoft-Sentinel-Flow**.\n - **Description**: Optionally give it a meaningful **description**. \n - **State**: Set the state to **Enabled**. \n - Expand the **Service Instance** section. \n - **Service Instance**: Select your desired Service Instance for which the Data Connector service is enabled. \n - Expand the **Source Configuration** section. \n - **Source**: Select **BloxOne Cloud Source**. \n - Select all desired **log types** you wish to collect. Currently supported log types are:\n - Threat Defense Query/Response Log\n - Threat Defense Threat Feeds Hits Log\n - DDI Query/Response Log\n - DDI DHCP Lease Log\n - Expand the **Destination Configuration** section. \n - Select the **Destination** you just created. \n - Click **Save & Close**. \n6. Allow the configuration some time to activate.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. 
Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCEFDataConnector/template_InfobloxCloudDataConnectorAma.JSON","true" -"Failed_Range_To_Ingest_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. 
Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" -"Infoblox_Failed_Indicators_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" -"dossier_atp_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" -"dossier_atp_threat_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" -"dossier_dns_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" -"dossier_geo_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" -"dossier_infoblox_web_cat_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" -"dossier_inforank_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" -"dossier_malware_analysis_v3_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information: \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" -"dossier_nameserver_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information: \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" -"dossier_nameserver_matches_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information: \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" -"dossier_ptr_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information: \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" -"dossier_rpz_feeds_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" -"dossier_rpz_feeds_records_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" -"dossier_threat_actor_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" -"dossier_tld_risk_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" -"dossier_whitelist_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" -"dossier_whois_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" -"CommonSecurityLog","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxSOCInsightsDataConnector_AMA","Infoblox","[Recommended] Infoblox SOC Insight Data Connector via AMA","The Infoblox SOC Insight Data Connector allows you to easily connect your Infoblox BloxOne SOC Insight data with Microsoft Sentinel. By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.

This data connector ingests Infoblox SOC Insight CDC logs into your Log Analytics Workspace using the new Azure Monitor Agent. Learn more about ingesting using the new Azure Monitor Agent [here](https://learn.microsoft.com/azure/sentinel/connect-cef-ama). **Microsoft recommends using this Data Connector.**","[{""title"": ""Workspace Keys"", ""description"": ""In order to use the playbooks as part of this solution, find your **Workspace ID** and **Workspace Primary Key** below for your convenience."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Workspace Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Parsers"", ""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected called [**InfobloxCDC_SOCInsights**](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights/Parsers/InfobloxCDC_SOCInsights.yaml) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": ""SOC Insights"", ""description"": "">This data connector assumes you have access to Infoblox BloxOne Threat Defense SOC Insights. You can find more information about SOC Insights [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/501514252/SOC+Insights).""}, {""title"": ""Infoblox Cloud Data Connector"", ""description"": "">This data connector assumes an Infoblox Data Connector host has already been created and configured in the Infoblox Cloud Services Portal (CSP). As the [**Infoblox Data Connector**](https://docs.infoblox.com/display/BloxOneThreatDefense/Deploying+the+Data+Connector+Solution) is a feature of BloxOne Threat Defense, access to an appropriate BloxOne Threat Defense subscription is required. 
See this [**quick-start guide**](https://www.infoblox.com/wp-content/uploads/infoblox-deployment-guide-data-connector.pdf) for more information and licensing requirements."", ""instructions"": [{""parameters"": {""title"": ""Follow the steps below to configure this data connector"", ""instructionSteps"": [{""title"": ""A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note: CEF logs are collected only from Linux Agents_\n\n1. Navigate to your **Microsoft Sentinel workspace > Data connectors** blade.\n\n2. Search for the **Common Event Format (CEF) via AMA** data connector and open it.\n\n3. Ensure there is no existing DCR configured to collect required facility of logs as it may cause log duplication. Create a new **DCR (Data Collection Rule)**.\n\n\t_Note: It is recommended to install the AMA agent v1.27 at minimum. [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication._\n\n4. Run the command provided in the **Common Event Format (CEF) via AMA** data connector page to configure the CEF collector on the machine.""}, {""title"": ""B. Within the Infoblox Cloud Services Portal, configure Infoblox BloxOne to send CEF Syslog data to the Infoblox Cloud Data Connector to forward to the Syslog agent"", ""description"": ""Follow the steps below to configure the Infoblox CDC to send BloxOne data to Microsoft Sentinel via the Linux Syslog agent.\n1. Navigate to **Manage > Data Connector**.\n2. Click the **Destination Configuration** tab at the top.\n3. Click **Create > Syslog**. 
\n - **Name**: Give the new Destination a meaningful **name**, such as **Microsoft-Sentinel-Destination**.\n - **Description**: Optionally give it a meaningful **description**.\n - **State**: Set the state to **Enabled**.\n - **Format**: Set the format to **CEF**.\n - **FQDN/IP**: Enter the IP address of the Linux device on which the Linux agent is installed.\n - **Port**: Leave the port number at **514**.\n - **Protocol**: Select desired protocol and CA certificate if applicable.\n - Click **Save & Close**.\n4. Click the **Traffic Flow Configuration** tab at the top.\n5. Click **Create**.\n - **Name**: Give the new Traffic Flow a meaningful **name**, such as **Microsoft-Sentinel-Flow**.\n - **Description**: Optionally give it a meaningful **description**. \n - **State**: Set the state to **Enabled**. \n - Expand the **Service Instance** section. \n - **Service Instance**: Select your desired Service Instance for which the Data Connector service is enabled. \n - Expand the **Source Configuration** section. \n - **Source**: Select **BloxOne Cloud Source**. \n - Select the **Internal Notifications** Log Type.\n - Expand the **Destination Configuration** section. \n - Select the **Destination** you just created. \n - Click **Save & Close**. \n6. Allow the configuration some time to activate.""}, {""title"": ""C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed. 
[Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxSOCInsights/InfobloxSOCInsightsDataConnector_AMA.json","true" -"InfobloxInsight_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxSOCInsightsDataConnector_API","Infoblox","Infoblox SOC Insight Data Connector via REST API","The Infoblox SOC Insight Data Connector allows you to easily connect your Infoblox BloxOne SOC Insight data with Microsoft Sentinel. By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": ""Workspace Keys"", ""description"": ""In order to use the playbooks as part of this solution, find your **Workspace ID** and **Workspace Primary Key** below for your convenience."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Workspace Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Parsers"", ""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected called [**InfobloxInsight**](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights/Parsers/InfobloxInsight.yaml) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": ""SOC Insights"", ""description"": "">This data connector assumes you have access to Infoblox BloxOne Threat Defense SOC Insights. 
You can find more information about SOC Insights [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/501514252/SOC+Insights).""}, {""title"": ""Follow the steps below to configure this data connector"", ""description"": """", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""1. Generate an Infoblox API Key and copy it somewhere safe"", ""description"": ""In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": ""2. Configure the Infoblox-SOC-Get-Open-Insights-API playbook"", ""description"": ""Create and configure the **Infoblox-SOC-Get-Open-Insights-API** playbook which is deployed with this solution. Enter your Infoblox API key in the appropriate parameter when prompted.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxSOCInsights/InfobloxSOCInsightsDataConnector_API.json","true" -"CommonSecurityLog","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxSOCInsightsDataConnector_Legacy","Infoblox","[Deprecated] Infoblox SOC Insight Data Connector via Legacy Agent","The Infoblox SOC Insight Data Connector allows you to easily connect your Infoblox BloxOne SOC Insight data with Microsoft Sentinel. By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.

This data connector ingests Infoblox SOC Insight CDC logs into your Log Analytics Workspace using the legacy Log Analytics agent.

**Microsoft recommends installation of Infoblox SOC Insight Data Connector via AMA Connector.** The legacy connector uses the Log Analytics agent which is about to be deprecated by **Aug 31, 2024,** and should only be installed where AMA is not supported.

Using MMA and AMA on the same machine can cause log duplication and extra ingestion cost. [More details](https://learn.microsoft.com/en-us/azure/sentinel/ama-migrate).","[{""title"": ""Workspace Keys"", ""description"": ""In order to use the playbooks as part of this solution, find your **Workspace ID** and **Workspace Primary Key** below for your convenience."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Workspace Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Parsers"", ""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected called [**InfobloxCDC_SOCInsights**](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights/Parsers/InfobloxCDC_SOCInsights.yaml) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""SOC Insights"", ""description"": "">This data connector assumes you have access to Infoblox BloxOne Threat Defense SOC Insights. You can find more information about SOC Insights [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/501514252/SOC+Insights). "", ""instructions"": []}, {""title"": ""Infoblox Cloud Data Connector"", ""description"": "">This data connector assumes an Infoblox Data Connector host has already been created and configured in the Infoblox Cloud Services Portal (CSP). As the [**Infoblox Data Connector**](https://docs.infoblox.com/display/BloxOneThreatDefense/Deploying+the+Data+Connector+Solution) is a feature of BloxOne Threat Defense, access to an appropriate BloxOne Threat Defense subscription is required. See this [**quick-start guide**](https://www.infoblox.com/wp-content/uploads/infoblox-deployment-guide-data-connector.pdf) for more information and licensing requirements."", ""instructions"": []}, {""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Within the Infoblox Cloud Services Portal, configure Infoblox BloxOne to send CEF Syslog data to the Infoblox Cloud Data Connector to forward to the Syslog agent"", ""description"": ""Follow the steps below to configure the Infoblox CDC to send BloxOne data to Microsoft Sentinel via the Linux Syslog agent.\n1. Navigate to **Manage > Data Connector**.\n2. Click the **Destination Configuration** tab at the top.\n3. Click **Create > Syslog**. 
\n - **Name**: Give the new Destination a meaningful **name**, such as **Microsoft-Sentinel-Destination**.\n - **Description**: Optionally give it a meaningful **description**.\n - **State**: Set the state to **Enabled**.\n - **Format**: Set the format to **CEF**.\n - **FQDN/IP**: Enter the IP address of the Linux device on which the Linux agent is installed.\n - **Port**: Leave the port number at **514**.\n - **Protocol**: Select desired protocol and CA certificate if applicable.\n - Click **Save & Close**.\n4. Click the **Traffic Flow Configuration** tab at the top.\n5. Click **Create**.\n - **Name**: Give the new Traffic Flow a meaningful **name**, such as **Microsoft-Sentinel-Flow**.\n - **Description**: Optionally give it a meaningful **description**. \n - **State**: Set the state to **Enabled**. \n - Expand the **Service Instance** section. \n - **Service Instance**: Select your desired Service Instance for which the Data Connector service is enabled. \n - Expand the **Source Configuration** section. \n - **Source**: Select **BloxOne Cloud Source**. \n - Select the **Internal Notifications** Log Type.\n - Expand the **Destination Configuration** section. \n - Select the **Destination** you just created. \n - Click **Save & Close**. \n6. Allow the configuration some time to activate.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxSOCInsights/InfobloxSOCInsightsDataConnector_Legacy.json","true" -"","Infoblox Cloud Data Connector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20Cloud%20Data%20Connector","infoblox","infoblox-cdc-solution","2021-10-20","","","Infoblox","Partner","https://support.infoblox.com/","","domains","","","","","","","false","","false" -"CommonSecurityLog","Infoblox Cloud Data Connector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20Cloud%20Data%20Connector","infoblox","infoblox-cdc-solution","2021-10-20","","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxCloudDataConnector","Infoblox","[Deprecated] Infoblox Cloud Data Connector via Legacy Agent","The Infoblox Cloud Data Connector allows you to easily connect your Infoblox BloxOne data with Microsoft Sentinel. By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**IMPORTANT:** This data connector depends on a parser based on a Kusto Function to work as expected called [**InfobloxCDC**](https://aka.ms/sentinel-InfobloxCloudDataConnector-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** This Microsoft Sentinel data connector assumes an Infoblox Data Connector host has already been created and configured in the Infoblox Cloud Services Portal (CSP). 
As the [**Infoblox Data Connector**](https://docs.infoblox.com/display/BloxOneThreatDefense/Deploying+the+Data+Connector+Solution) is a feature of BloxOne Threat Defense, access to an appropriate BloxOne Threat Defense subscription is required. See this [**quick-start guide**](https://www.infoblox.com/wp-content/uploads/infoblox-deployment-guide-data-connector.pdf) for more information and licensing requirements."", ""instructions"": []}, {""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. 
Configure Infoblox BloxOne to send Syslog data to the Infoblox Cloud Data Connector to forward to the Syslog agent"", ""description"": ""Follow the steps below to configure the Infoblox CDC to send BloxOne data to Microsoft Sentinel via the Linux Syslog agent.\n1. Navigate to **Manage > Data Connector**.\n2. Click the **Destination Configuration** tab at the top.\n3. Click **Create > Syslog**. \n - **Name**: Give the new Destination a meaningful **name**, such as **Microsoft-Sentinel-Destination**.\n - **Description**: Optionally give it a meaningful **description**.\n - **State**: Set the state to **Enabled**.\n - **Format**: Set the format to **CEF**.\n - **FQDN/IP**: Enter the IP address of the Linux device on which the Linux agent is installed.\n - **Port**: Leave the port number at **514**.\n - **Protocol**: Select desired protocol and CA certificate if applicable.\n - Click **Save & Close**.\n4. Click the **Traffic Flow Configuration** tab at the top.\n5. Click **Create**.\n - **Name**: Give the new Traffic Flow a meaningful **name**, such as **Microsoft-Sentinel-Flow**.\n - **Description**: Optionally give it a meaningful **description**. \n - **State**: Set the state to **Enabled**. \n - Expand the **Service Instance** section. \n - **Service Instance**: Select your desired Service Instance for which the Data Connector service is enabled. \n - Expand the **Source Configuration** section. \n - **Source**: Select **BloxOne Cloud Source**. \n - Select all desired **log types** you wish to collect. Currently supported log types are:\n - Threat Defense Query/Response Log\n - Threat Defense Threat Feeds Hits Log\n - DDI Query/Response Log\n - DDI DHCP Lease Log\n - Expand the **Destination Configuration** section. \n - Select the **Destination** you just created. \n - Click **Save & Close**. \n6. Allow the configuration some time to activate.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20Cloud%20Data%20Connector/Data%20Connectors/InfobloxCloudDataConnector.json","true" -"CommonSecurityLog","Infoblox Cloud Data Connector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20Cloud%20Data%20Connector","infoblox","infoblox-cdc-solution","2021-10-20","","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxCloudDataConnectorAma","Infoblox","[Deprecated] Infoblox Cloud Data Connector via AMA","The Infoblox Cloud Data Connector allows you to easily connect your Infoblox BloxOne data with Microsoft Sentinel. By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**IMPORTANT:** This data connector depends on a parser based on a Kusto Function to work as expected called [**InfobloxCDC**](https://aka.ms/sentinel-InfobloxCloudDataConnector-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** This Microsoft Sentinel data connector assumes an Infoblox Data Connector host has already been created and configured in the Infoblox Cloud Services Portal (CSP). As the [**Infoblox Data Connector**](https://docs.infoblox.com/display/BloxOneThreatDefense/Deploying+the+Data+Connector+Solution) is a feature of BloxOne Threat Defense, access to an appropriate BloxOne Threat Defense subscription is required. 
See this [**quick-start guide**](https://www.infoblox.com/wp-content/uploads/infoblox-deployment-guide-data-connector.pdf) for more information and licensing requirements."", ""instructions"": [{""parameters"": {""title"": ""1. Follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note: CEF logs are collected only from Linux Agents_\n\n1. Navigate to your **Microsoft Sentinel workspace > Data connectors** blade.\n\n2. Search for the **Common Event Format (CEF) via AMA** data connector and open it.\n\n3. Ensure there is no existing DCR configured to collect required facility of logs as it may cause log duplication. Create a new **DCR (Data Collection Rule)**.\n\n\t_Note: It is recommended to install the AMA agent v1.27 at minimum. [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication._\n\n4. Run the command provided in the **CEF via AMA data connector** page to configure the CEF collector on the machine.""}, {""title"": ""Step B. Configure Infoblox BloxOne to send Syslog data to the Infoblox Cloud Data Connector to forward to the Syslog agent"", ""description"": ""Follow the steps below to configure the Infoblox CDC to send BloxOne data to Microsoft Sentinel via the Linux Syslog agent.\n1. Navigate to **Manage > Data Connector**.\n2. Click the **Destination Configuration** tab at the top.\n3. Click **Create > Syslog**. 
\n - **Name**: Give the new Destination a meaningful **name**, such as **Microsoft-Sentinel-Destination**.\n - **Description**: Optionally give it a meaningful **description**.\n - **State**: Set the state to **Enabled**.\n - **Format**: Set the format to **CEF**.\n - **FQDN/IP**: Enter the IP address of the Linux device on which the Linux agent is installed.\n - **Port**: Leave the port number at **514**.\n - **Protocol**: Select desired protocol and CA certificate if applicable.\n - Click **Save & Close**.\n4. Click the **Traffic Flow Configuration** tab at the top.\n5. Click **Create**.\n - **Name**: Give the new Traffic Flow a meaningful **name**, such as **Microsoft-Sentinel-Flow**.\n - **Description**: Optionally give it a meaningful **description**. \n - **State**: Set the state to **Enabled**. \n - Expand the **Service Instance** section. \n - **Service Instance**: Select your desired Service Instance for which the Data Connector service is enabled. \n - Expand the **Source Configuration** section. \n - **Source**: Select **BloxOne Cloud Source**. \n - Select all desired **log types** you wish to collect. Currently supported log types are:\n - Threat Defense Query/Response Log\n - Threat Defense Threat Feeds Hits Log\n - DDI Query/Response Log\n - DDI DHCP Lease Log\n - Expand the **Destination Configuration** section. \n - Select the **Destination** you just created. \n - Click **Save & Close**. \n6. Allow the configuration some time to activate.""}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20Cloud%20Data%20Connector/Data%20Connectors/template_InfobloxCloudDataConnectorAMA.json","true" -"","Infoblox NIOS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20NIOS","azuresentinel","azure-sentinel-solution-infobloxnios","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"Syslog","Infoblox NIOS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20NIOS","azuresentinel","azure-sentinel-solution-infobloxnios","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","InfobloxNIOS","Infoblox","[Deprecated] Infoblox NIOS","The [Infoblox Network Identity Operating System (NIOS)](https://www.infoblox.com/glossary/network-identity-operating-system-nios/) connector allows you to easily connect your Infoblox NIOS logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Infoblox and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20NIOS/Parser/Infoblox.yaml), on the second line of the query, enter any unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n 2. Select **Apply below configuration to my machines** and select the facilities and severities.\n 3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. 
Configure and connect the Infoblox NIOS"", ""description"": ""[Follow these instructions](https://www.infoblox.com/wp-content/uploads/infoblox-deployment-guide-slog-and-snmp-configuration-for-nios.pdf) to enable syslog forwarding of Infoblox NIOS Logs. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}, {""title"": ""4. Configure the Sentinel parser"", ""description"": ""Update the watchlist 'Sources_by_Source' with the hostname(s) of your Infoblox device(s). Set SourceType to 'InfobloxNIOS' and Source to the value of 'Computer' seen in the logs seen in Syslog table.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Infoblox NIOS"", ""description"": ""must be configured to export logs via Syslog""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20NIOS/Data%20Connectors/Connector_Syslog_Infoblox.json","true" -"","Infoblox SOC Insights","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights","infoblox","infoblox-soc-insight-solution","2024-03-06","","","Infoblox","Partner","https://support.infoblox.com/","","domains","","","","","","","false","","false" -"CommonSecurityLog","Infoblox SOC Insights","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights","infoblox","infoblox-soc-insight-solution","2024-03-06","","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxSOCInsightsDataConnector_AMA","Infoblox","[Deprecated] Infoblox SOC Insight Data Connector via AMA","The Infoblox SOC Insight Data Connector allows you to easily connect your Infoblox BloxOne SOC Insight data with Microsoft Sentinel. 
By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.

This data connector ingests Infoblox SOC Insight CDC logs into your Log Analytics Workspace using the new Azure Monitor Agent. Learn more about ingesting using the new Azure Monitor Agent [here](https://learn.microsoft.com/azure/sentinel/connect-cef-ama). **Microsoft recommends using this Data Connector.**","[{""title"": ""Workspace Keys"", ""description"": ""In order to use the playbooks as part of this solution, find your **Workspace ID** and **Workspace Primary Key** below for your convenience."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Workspace Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Parsers"", ""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected called [**InfobloxCDC_SOCInsights**](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights/Parsers/InfobloxCDC_SOCInsights.yaml) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": ""SOC Insights"", ""description"": "">This data connector assumes you have access to Infoblox BloxOne Threat Defense SOC Insights. You can find more information about SOC Insights [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/501514252/SOC+Insights).""}, {""title"": ""Infoblox Cloud Data Connector"", ""description"": "">This data connector assumes an Infoblox Data Connector host has already been created and configured in the Infoblox Cloud Services Portal (CSP). As the [**Infoblox Data Connector**](https://docs.infoblox.com/display/BloxOneThreatDefense/Deploying+the+Data+Connector+Solution) is a feature of BloxOne Threat Defense, access to an appropriate BloxOne Threat Defense subscription is required. 
See this [**quick-start guide**](https://www.infoblox.com/wp-content/uploads/infoblox-deployment-guide-data-connector.pdf) for more information and licensing requirements."", ""instructions"": [{""parameters"": {""title"": ""Follow the steps below to configure this data connector"", ""instructionSteps"": [{""title"": ""A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note: CEF logs are collected only from Linux Agents_\n\n1. Navigate to your **Microsoft Sentinel workspace > Data connectors** blade.\n\n2. Search for the **Common Event Format (CEF) via AMA** data connector and open it.\n\n3. Ensure there is no existing DCR configured to collect required facility of logs as it may cause log duplication. Create a new **DCR (Data Collection Rule)**.\n\n\t_Note: It is recommended to install the AMA agent v1.27 at minimum. [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication._\n\n4. Run the command provided in the **Common Event Format (CEF) via AMA** data connector page to configure the CEF collector on the machine.""}, {""title"": ""B. Within the Infoblox Cloud Services Portal, configure Infoblox BloxOne to send CEF Syslog data to the Infoblox Cloud Data Connector to forward to the Syslog agent"", ""description"": ""Follow the steps below to configure the Infoblox CDC to send BloxOne data to Microsoft Sentinel via the Linux Syslog agent.\n1. Navigate to **Manage > Data Connector**.\n2. Click the **Destination Configuration** tab at the top.\n3. Click **Create > Syslog**. 
\n - **Name**: Give the new Destination a meaningful **name**, such as **Microsoft-Sentinel-Destination**.\n - **Description**: Optionally give it a meaningful **description**.\n - **State**: Set the state to **Enabled**.\n - **Format**: Set the format to **CEF**.\n - **FQDN/IP**: Enter the IP address of the Linux device on which the Linux agent is installed.\n - **Port**: Leave the port number at **514**.\n - **Protocol**: Select desired protocol and CA certificate if applicable.\n - Click **Save & Close**.\n4. Click the **Traffic Flow Configuration** tab at the top.\n5. Click **Create**.\n - **Name**: Give the new Traffic Flow a meaningful **name**, such as **Microsoft-Sentinel-Flow**.\n - **Description**: Optionally give it a meaningful **description**. \n - **State**: Set the state to **Enabled**. \n - Expand the **Service Instance** section. \n - **Service Instance**: Select your desired Service Instance for which the Data Connector service is enabled. \n - Expand the **Source Configuration** section. \n - **Source**: Select **BloxOne Cloud Source**. \n - Select the **Internal Notifications** Log Type.\n - Expand the **Destination Configuration** section. \n - Select the **Destination** you just created. \n - Click **Save & Close**. \n6. Allow the configuration some time to activate.""}, {""title"": ""C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed. 
[Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights/Data%20Connectors/InfobloxSOCInsightsDataConnector_AMA.json","true" -"InfobloxInsight_CL","Infoblox SOC Insights","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights","infoblox","infoblox-soc-insight-solution","2024-03-06","","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxSOCInsightsDataConnector_API","Infoblox","Infoblox SOC Insight Data Connector via REST API","The Infoblox SOC Insight Data Connector allows you to easily connect your Infoblox BloxOne SOC Insight data with Microsoft Sentinel. By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": ""Workspace Keys"", ""description"": ""In order to use the playbooks as part of this solution, find your **Workspace ID** and **Workspace Primary Key** below for your convenience."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Workspace Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Parsers"", ""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected called [**InfobloxInsight**](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights/Parsers/InfobloxInsight.yaml) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""SOC Insights"", ""description"": "">This data connector assumes you have access to Infoblox BloxOne Threat Defense SOC Insights. 
You can find more information about SOC Insights [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/501514252/SOC+Insights)."", ""instructions"": []}, {""title"": ""Follow the steps below to configure this data connector"", ""description"": """", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""1. Generate an Infoblox API Key and copy it somewhere safe"", ""description"": ""In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F)."", ""instructions"": []}, {""title"": ""2. Configure the Infoblox-SOC-Get-Open-Insights-API playbook"", ""description"": ""Create and configure the **Infoblox-SOC-Get-Open-Insights-API** playbook which is deployed with this solution. Enter your Infoblox API key in the appropriate parameter when prompted."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights/Data%20Connectors/InfobloxSOCInsightsDataConnector_API.json","true" -"CommonSecurityLog","Infoblox SOC Insights","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights","infoblox","infoblox-soc-insight-solution","2024-03-06","","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxSOCInsightsDataConnector_Legacy","Infoblox","[Deprecated] Infoblox SOC Insight Data Connector via Legacy Agent","The Infoblox SOC Insight Data Connector allows you to easily connect your Infoblox BloxOne SOC Insight data with Microsoft Sentinel. By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.

This data connector ingests Infoblox SOC Insight CDC logs into your Log Analytics Workspace using the legacy Log Analytics agent.

**Microsoft recommends installation of Infoblox SOC Insight Data Connector via AMA Connector.** The legacy connector uses the Log Analytics agent which is about to be deprecated by **Aug 31, 2024,** and should only be installed where AMA is not supported.

Using MMA and AMA on the same machine can cause log duplication and extra ingestion cost. [More details](https://learn.microsoft.com/en-us/azure/sentinel/ama-migrate).","[{""title"": ""Workspace Keys"", ""description"": ""In order to use the playbooks as part of this solution, find your **Workspace ID** and **Workspace Primary Key** below for your convenience."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Workspace Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Parsers"", ""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected called [**InfobloxCDC_SOCInsights**](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights/Parsers/InfobloxCDC_SOCInsights.yaml) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""SOC Insights"", ""description"": "">This data connector assumes you have access to Infoblox BloxOne Threat Defense SOC Insights. You can find more information about SOC Insights [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/501514252/SOC+Insights). "", ""instructions"": []}, {""title"": ""Infoblox Cloud Data Connector"", ""description"": "">This data connector assumes an Infoblox Data Connector host has already been created and configured in the Infoblox Cloud Services Portal (CSP). As the [**Infoblox Data Connector**](https://docs.infoblox.com/display/BloxOneThreatDefense/Deploying+the+Data+Connector+Solution) is a feature of BloxOne Threat Defense, access to an appropriate BloxOne Threat Defense subscription is required. See this [**quick-start guide**](https://www.infoblox.com/wp-content/uploads/infoblox-deployment-guide-data-connector.pdf) for more information and licensing requirements."", ""instructions"": []}, {""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Within the Infoblox Cloud Services Portal, configure Infoblox BloxOne to send CEF Syslog data to the Infoblox Cloud Data Connector to forward to the Syslog agent"", ""description"": ""Follow the steps below to configure the Infoblox CDC to send BloxOne data to Microsoft Sentinel via the Linux Syslog agent.\n1. Navigate to **Manage > Data Connector**.\n2. Click the **Destination Configuration** tab at the top.\n3. Click **Create > Syslog**. 
\n - **Name**: Give the new Destination a meaningful **name**, such as **Microsoft-Sentinel-Destination**.\n - **Description**: Optionally give it a meaningful **description**.\n - **State**: Set the state to **Enabled**.\n - **Format**: Set the format to **CEF**.\n - **FQDN/IP**: Enter the IP address of the Linux device on which the Linux agent is installed.\n - **Port**: Leave the port number at **514**.\n - **Protocol**: Select desired protocol and CA certificate if applicable.\n - Click **Save & Close**.\n4. Click the **Traffic Flow Configuration** tab at the top.\n5. Click **Create**.\n - **Name**: Give the new Traffic Flow a meaningful **name**, such as **Microsoft-Sentinel-Flow**.\n - **Description**: Optionally give it a meaningful **description**. \n - **State**: Set the state to **Enabled**. \n - Expand the **Service Instance** section. \n - **Service Instance**: Select your desired Service Instance for which the Data Connector service is enabled. \n - Expand the **Source Configuration** section. \n - **Source**: Select **BloxOne Cloud Source**. \n - Select the **Internal Notifications** Log Type.\n - Expand the **Destination Configuration** section. \n - Select the **Destination** you just created. \n - Click **Save & Close**. \n6. Allow the configuration some time to activate.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights/Data%20Connectors/InfobloxSOCInsightsDataConnector_Legacy.json","true" -"","InsightVM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/InsightVM","","","","","","","","","","","","","","","","","false","","false" -"","Integration for Atlassian Beacon","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Integration%20for%20Atlassian%20Beacon","defendlimited1682894612656","microsoft-sentinel-solution-atlassian-beacon","2023-09-22","","","DEFEND Ltd.","Partner","https://www.defend.co.nz/","","domains","","","","","","","false","","false" -"atlassian_beacon_alerts_CL","Integration for Atlassian Beacon","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Integration%20for%20Atlassian%20Beacon","defendlimited1682894612656","microsoft-sentinel-solution-atlassian-beacon","2023-09-22","","","DEFEND Ltd.","Partner","https://www.defend.co.nz/","","domains","AtlassianBeaconAlerts","DEFEND Ltd.","Atlassian Beacon Alerts","Atlassian Beacon is a cloud product that is built for Intelligent threat detection across the Atlassian platforms (Jira, Confluence, and Atlassian Admin). This can help users detect, investigate and respond to risky user activity for the Atlassian suite of products. The solution is a custom data connector from DEFEND Ltd. that is used to visualize the alerts ingested from Atlassian Beacon to Microsoft Sentinel via a Logic App.","[{""description"": "">1. Navigate to the newly installed Logic App 'Atlassian Beacon Integration'\n\n>2. Navigate to 'Logic app designer'\n\n>3. Expand the 'When a HTTP request is received'\n\n>4. 
Copy the 'HTTP POST URL'"", ""title"": ""1. Microsoft Sentinel""}, {""description"": "">1. Login to Atlassian Beacon using an admin account\n\n>2. Navigate to 'SIEM forwarding' under SETTINGS\n\n> 3. Paste the copied URL from Logic App in the text box\n\n> 4. Click the 'Save' button"", ""title"": ""2. Atlassian Beacon""}, {""description"": "">1. Login to Atlassian Beacon using an admin account\n\n>2. Navigate to 'SIEM forwarding' under SETTINGS\n\n> 3. Click the 'Test' button right next to the newly configured webhook\n\n> 4. Navigate to Microsoft Sentinel\n\n> 5. Navigate to the newly installed Logic App\n\n> 6. Check for the Logic App Run under 'Runs history'\n\n> 7. Check for logs under the table name 'atlassian_beacon_alerts_CL' in 'Logs'\n\n> 8. If the analytic rule has been enabled, the above Test alert should have created an incident in Microsoft Sentinel"", ""title"": ""3. Testing and Validation""}]","{""resourceProvider"": [{""permissionsDisplayText"": ""read and write permissions are required."", ""provider"": ""Microsoft.OperationalInsights/workspaces"", ""providerDisplayName"": ""Workspace"", ""requiredPermissions"": {""delete"": true, ""read"": true, ""write"": true}, ""scope"": ""Workspace""}, {""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""providerDisplayName"": ""Keys"", ""requiredPermissions"": {""action"": true}, ""scope"": ""Workspace""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Integration%20for%20Atlassian%20Beacon/Data%20Connectors/AtlassianBeacon_DataConnector.json","true" -"","Intel471","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Intel471","intel471inc1641226539011","microsoft-sentinel-solution-intel471","2023-06-21","","","Intel 471","Partner","https://intel471.com/company/contact","","domains","","","","","","","false","","false" -"SecurityAlert","IoTOTThreatMonitoringwithDefenderforIoT","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IoTOTThreatMonitoringwithDefenderforIoT","azuresentinel","azure-sentinel-solution-unifiedmicrosoftsocforot","2021-10-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","IoT","Microsoft","Microsoft Defender for IoT","Gain insights into your IoT security by connecting Microsoft Defender for IoT alerts to Microsoft Sentinel.
You can get out-of-the-box alert metrics and data, including alert trends, top alerts, and alert breakdown by severity.
You can also get information about the recommendations provided for your IoT hubs including top recommendations and recommendations by severity. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2224002&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect Microsoft Defender for IoT to Microsoft Sentinel"", ""description"": ""Select Connect next to each Subscription whose IoT Hub's alerts you want to stream to Microsoft Sentinel."", ""instructions"": [{""parameters"": {""linkType"": ""OpenIotPricingModel""}, ""type"": ""InstallAgent""}, {""parameters"": {}, ""type"": ""IotV2""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Subscription"", ""description"": ""Contributor permissions to the subscription of your IoT Hub.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IoTOTThreatMonitoringwithDefenderforIoT/Data%20Connectors/template_IoT.JSON","true" -"","IronNet IronDefense","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IronNet%20IronDefense","ironnetcybersecurity1585849518753","irondefense-for-sentinel","2021-10-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"CommonSecurityLog","IronNet IronDefense","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IronNet%20IronDefense","ironnetcybersecurity1585849518753","irondefense-for-sentinel","2021-10-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","IronNetIronDefense","IronNet","IronNet IronDefense","The IronNet IronDefense connector enables ingest of IronDefense alerts, events, and 
IronDome notifications into Sentinel, enabling Sentinel to utilize IronDefense's behavioral analytics and the IronDome community to quickly identify threats in your enterprise network.","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Configure the IronNet Data Collector to send alerts, events, and IronDome notifications in CEF format to the proxy machine. 
Make sure to send the logs to port 514 TCP on the machine's IP address."", ""innerSteps"": [{""title"": ""2.1 Deploy the IronNet Data Collector VM"", ""description"": ""Deploy the IronNet Data Collector VM using the image provided by your IronNet representative.""}, {""title"": ""2.2 Configure the IronAPI connector using the Data Collector wizard."", ""description"": ""SSH into the Data Collector VM as the config user and use the Data Collector configuration wizard to configure the IronAPI connector to receive notifications from IronDefense and forward them to your Microsoft Sentinel workspace. You will need:\n\n> 1. IronAPI credentials.\n\n> 2. IronDefense hostname.\n\n> 3. The public IP of the linux machine running the CEF collector."", ""instructions"": [{""parameters"": {""label"": ""Run the following command to launch the Data Collector configuration wizard:"", ""value"": ""wizard""}, ""type"": ""CopyableLabel""}]}, {""title"": ""2.3 Verify IronAPI connector configuration"", ""description"": ""Verify the IronAPI connector has been configured properly and is running normally."", ""instructions"": [{""parameters"": {""label"": ""Run the following command to view the logs in the IronAPI connector. If no errors occur after 5 minutes, the connector is running normally."", ""value"": ""sudo journalctl -f CONTAINER_NAME=ironapi-notifications-collector""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IronNet%20IronDefense/Data%20Connectors/IronNetIronDefense.json","true" -"","Island","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Island","islandtechnologyinc1679434413850","island-sentinel-solution","2023-05-02","2023-07-20","","Island","Partner","https://www.island.io","","domains","","","","","","","false","","false" -"Island_Admin_CL","Island","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Island","islandtechnologyinc1679434413850","island-sentinel-solution","2023-05-02","2023-07-20","","Island","Partner","https://www.island.io","","domains","Island_Admin_Polling","Island","Island Enterprise Browser Admin Audit (Polling CCP)","The [Island](https://www.island.io) Admin connector provides the capability to ingest Island Admin Audit logs into Microsoft Sentinel.","[{""title"": ""Connect Island to Microsoft Sentinel"", ""description"": ""Provide the Island API URL and Key. 
API URL is https://management.island.io/api/external/v1/adminActions for US or https://eu.management.island.io/api/external/v1/adminActions for EU.\n Generate the API Key in the Management Console under Settings > API."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""API URL"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{placeHolder1}}"", ""placeHolderValue"": """"}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true, ""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Island API Key"", ""description"": ""An Island API key is required.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Island/Data%20Connectors/IslandAdminAPIConnector.json","true" -"Island_User_CL","Island","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Island","islandtechnologyinc1679434413850","island-sentinel-solution","2023-05-02","2023-07-20","","Island","Partner","https://www.island.io","","domains","Island_User_Polling","Island","Island Enterprise Browser User Activity (Polling CCP)","The [Island](https://www.island.io) connector provides the capability to ingest Island User Activity logs into Microsoft Sentinel.","[{""title"": ""Connect Island to Microsoft Sentinel"", ""description"": ""Provide the Island API URL and Key. 
API URL is https://management.island.io/api/external/v1/timeline for US or https://eu.management.island.io/api/external/v1/timeline for EU.\n Generate the API Key in the Management Console under Settings > API."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""API URL"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{placeHolder1}}"", ""placeHolderValue"": """"}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true, ""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Island API Key"", ""description"": ""An Island API key is required.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Island/Data%20Connectors/IslandUserAPIConnector.json","true" -"","Ivanti Unified Endpoint Management","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Ivanti%20Unified%20Endpoint%20Management","azuresentinel","azure-sentinel-solution-ivantiuem","2022-07-05","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"Syslog","Ivanti Unified Endpoint Management","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Ivanti%20Unified%20Endpoint%20Management","azuresentinel","azure-sentinel-solution-ivantiuem","2022-07-05","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","IvantiUEM","Ivanti","[Deprecated] Ivanti Unified Endpoint Management","The [Ivanti Unified Endpoint Management](https://www.ivanti.com/products/unified-endpoint-manager) data connector provides the capability to ingest [Ivanti UEM 
Alerts](https://help.ivanti.com/ld/help/en_US/LDMS/11.0/Windows/alert-c-monitoring-overview.htm) into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**IvantiUEMEvent**](https://aka.ms/sentinel-ivantiuem-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using Ivanti Unified Endpoint Management Release 2021.1 Version 11.0.3.374"", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server where the Ivanti Unified Endpoint Management Alerts are forwarded.\n\n> Logs from Ivanti Unified Endpoint Management Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, 
""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure Ivanti Unified Endpoint Management alert forwarding."", ""description"": ""[Follow the instructions](https://help.ivanti.com/ld/help/en_US/LDMS/11.0/Windows/alert-t-define-action.htm) to set up Alert Actions to send logs to syslog server.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Ivanti%20Unified%20Endpoint%20Management/Data%20Connectors/Ivanti_UEM_Syslog.json","true" -"","JBoss","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/JBoss","azuresentinel","azure-sentinel-solution-jboss","2021-10-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"JBossLogs_CL","JBoss","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/JBoss","azuresentinel","azure-sentinel-solution-jboss","2021-10-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","JBossEAP","Red Hat","[Deprecated] JBoss Enterprise Application Platform","The JBoss Enterprise Application Platform data connector provides the capability to ingest [JBoss](https://www.redhat.com/en/technologies/jboss-middleware/application-platform) events into Microsoft Sentinel. 
Refer to [Red Hat documentation](https://access.redhat.com/documentation/en-us/red_hat_jboss_enterprise_application_platform/7.0/html/configuration_guide/logging_with_jboss_eap) for more information.","[{""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**JBossEvent**](https://aka.ms/sentinel-jbosseap-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using JBoss Enterprise Application Platform 7.4.0."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the JBoss server where the logs are generated.\n\n> Logs from JBoss Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents.\n "", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": 
""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the custom log directory to be collected"", ""instructions"": [{""parameters"": {""linkType"": ""OpenCustomLogsSettings""}, ""type"": ""InstallAgent""}]}, {""description"": ""1. Select the link above to open your workspace advanced settings \n2. Click **+Add custom**\n3. Click **Browse** to upload a sample of a JBoss log file (e.g. server.log). Then, click **Next >**\n4. Select **Timestamp** as the record delimiter and select Timestamp format **YYYY-MM-DD HH:MM:SS** from the dropdown list then click **Next >**\n5. Select **Windows** or **Linux** and enter the path to JBoss logs based on your configuration. Example:\n - **Linux** Directory:\n\n>Standalone server: EAP_HOME/standalone/log/server.log\n\n>Managed domain: EAP_HOME/domain/servers/SERVER_NAME/log/server.log\n\n6. After entering the path, click the '+' symbol to apply, then click **Next >** \n7. Add **JBossLogs** as the custom log Name and click **Done**""}, {""title"": ""3. 
Check logs in Microsoft Sentinel"", ""description"": ""Open Log Analytics to check if the logs are received using the JBossLogs_CL Custom log table.\n\n>**NOTE:** It may take up to 30 minutes before new logs will appear in JBossLogs_CL table."", ""instructions"": []}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/JBoss/Data%20Connectors/Connector_JBoss.json","true" -"","Jamf Protect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Jamf%20Protect","jamfsoftwareaustraliaptyltd1620360395539","jamf_protect","2022-10-10","2025-09-02","","Jamf Software, LLC","Partner","https://www.jamf.com/support/","","domains","","","","","","","false","","false" -"jamfprotectalerts_CL","Jamf Protect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Jamf%20Protect","jamfsoftwareaustraliaptyltd1620360395539","jamf_protect","2022-10-10","2025-09-02","","Jamf Software, LLC","Partner","https://www.jamf.com/support/","","domains","JamfProtectPush","Jamf","Jamf Protect Push Connector","The [Jamf Protect](https://www.jamf.com/products/jamf-protect/) connector provides the capability to read raw event data from Jamf Protect in Microsoft Sentinel.","[{""title"": ""1. 
Create ARM Resources and Provide the Required Permissions"", ""description"": ""This connector reads data from the tables that Jamf Protect uses in a Microsoft Analytics Workspace, if the [data forwarding](https://docs.jamf.com/jamf-protect/documentation/Data_Forwarding_to_a_Third_Party_Storage_Solution.html?hl=sentinel#task-4227) option is enabled in Jamf Protect then raw event data is sent to the Microsoft Sentinel Ingestion API."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated Configuration and Secure Data Ingestion with Entra Application \nClicking on \""Deploy\"" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR). \nIt will then create an Entra application, link the DCR to it, and set the entered secret in the application. This setup enables data to be sent securely to the DCR using an Entra token.""}}, {""parameters"": {""label"": ""Deploy Jamf Protect connector resources"", ""applicationDisplayName"": ""Jamf Protect Connector Application""}, ""type"": ""DeployPushConnectorButton""}]}, {""title"": ""2. 
Push your logs into the workspace"", ""description"": ""Use the following parameters to configure your machine to send the logs to the workspace."", ""instructions"": [{""parameters"": {""label"": ""Tenant ID (Directory ID)"", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the App Registration Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the App Registration Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Endpoint Uri"", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the Data Collection Endpoint Uri""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Rule Immutable ID"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the Data Collection Rule Immutable ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Unified Logs Stream Name"", ""value"": ""Custom-jamfprotectunifiedlogs""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Telemetry Stream Name"", ""value"": ""Custom-jamfprotecttelemetryv2""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Alerts Stream Name"", ""value"": ""Custom-jamfprotectalerts""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in 
Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rule (DCR). Typically requires Azure RBAC Owner or User Access Administrator role""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Jamf%20Protect/Data%20Connectors/JamfProtect_ccp/connectorDefinition.json","true" -"jamfprotecttelemetryv2_CL","Jamf Protect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Jamf%20Protect","jamfsoftwareaustraliaptyltd1620360395539","jamf_protect","2022-10-10","2025-09-02","","Jamf Software, LLC","Partner","https://www.jamf.com/support/","","domains","JamfProtectPush","Jamf","Jamf Protect Push Connector","The [Jamf Protect](https://www.jamf.com/products/jamf-protect/) connector provides the capability to read raw event data from Jamf Protect in Microsoft Sentinel.","[{""title"": ""1. Create ARM Resources and Provide the Required Permissions"", ""description"": ""This connector reads data from the tables that Jamf Protect uses in a Microsoft Analytics Workspace, if the [data forwarding](https://docs.jamf.com/jamf-protect/documentation/Data_Forwarding_to_a_Third_Party_Storage_Solution.html?hl=sentinel#task-4227) option is enabled in Jamf Protect then raw event data is sent to the Microsoft Sentinel Ingestion API."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated Configuration and Secure Data Ingestion with Entra Application \nClicking on \""Deploy\"" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR). \nIt will then create an Entra application, link the DCR to it, and set the entered secret in the application. 
This setup enables data to be sent securely to the DCR using an Entra token.""}}, {""parameters"": {""label"": ""Deploy Jamf Protect connector resources"", ""applicationDisplayName"": ""Jamf Protect Connector Application""}, ""type"": ""DeployPushConnectorButton""}]}, {""title"": ""2. Push your logs into the workspace"", ""description"": ""Use the following parameters to configure your machine to send the logs to the workspace."", ""instructions"": [{""parameters"": {""label"": ""Tenant ID (Directory ID)"", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the App Registration Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the App Registration Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Endpoint Uri"", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the Data Collection Endpoint Uri""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Rule Immutable ID"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the Data Collection Rule Immutable ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Unified Logs Stream Name"", ""value"": ""Custom-jamfprotectunifiedlogs""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Telemetry Stream Name"", ""value"": ""Custom-jamfprotecttelemetryv2""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Alerts Stream Name"", ""value"": ""Custom-jamfprotectalerts""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write 
permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rule (DCR). Typically requires Azure RBAC Owner or User Access Administrator role""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Jamf%20Protect/Data%20Connectors/JamfProtect_ccp/connectorDefinition.json","true" -"jamfprotectunifiedlogs_CL","Jamf Protect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Jamf%20Protect","jamfsoftwareaustraliaptyltd1620360395539","jamf_protect","2022-10-10","2025-09-02","","Jamf Software, LLC","Partner","https://www.jamf.com/support/","","domains","JamfProtectPush","Jamf","Jamf Protect Push Connector","The [Jamf Protect](https://www.jamf.com/products/jamf-protect/) connector provides the capability to read raw event data from Jamf Protect in Microsoft Sentinel.","[{""title"": ""1. Create ARM Resources and Provide the Required Permissions"", ""description"": ""This connector reads data from the tables that Jamf Protect uses in a Microsoft Analytics Workspace, if the [data forwarding](https://docs.jamf.com/jamf-protect/documentation/Data_Forwarding_to_a_Third_Party_Storage_Solution.html?hl=sentinel#task-4227) option is enabled in Jamf Protect then raw event data is sent to the Microsoft Sentinel Ingestion API."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated Configuration and Secure Data Ingestion with Entra Application \nClicking on \""Deploy\"" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR). 
\nIt will then create an Entra application, link the DCR to it, and set the entered secret in the application. This setup enables data to be sent securely to the DCR using an Entra token.""}}, {""parameters"": {""label"": ""Deploy Jamf Protect connector resources"", ""applicationDisplayName"": ""Jamf Protect Connector Application""}, ""type"": ""DeployPushConnectorButton""}]}, {""title"": ""2. Push your logs into the workspace"", ""description"": ""Use the following parameters to configure your machine to send the logs to the workspace."", ""instructions"": [{""parameters"": {""label"": ""Tenant ID (Directory ID)"", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the App Registration Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the App Registration Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Endpoint Uri"", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the Data Collection Endpoint Uri""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Rule Immutable ID"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the Data Collection Rule Immutable ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Unified Logs Stream Name"", ""value"": ""Custom-jamfprotectunifiedlogs""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Telemetry Stream Name"", ""value"": ""Custom-jamfprotecttelemetryv2""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Alerts Stream Name"", ""value"": ""Custom-jamfprotectalerts""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": 
[{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rule (DCR). Typically requires Azure RBAC Owner or User Access Administrator role""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Jamf%20Protect/Data%20Connectors/JamfProtect_ccp/connectorDefinition.json","true" -"","Joshua-Cyberiskvision","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Joshua-Cyberiskvision","almavivaspa1636563933762","joshua-cyberiskvision","2022-01-10","2022-01-10","","Joshua Cyberiskvision","Partner","https://www.cyberiskvision.com/","","domains","","","","","","","false","","false" -"","Juniper SRX","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Juniper%20SRX","azuresentinel","azure-sentinel-solution-junipersrx","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"Syslog","Juniper SRX","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Juniper%20SRX","azuresentinel","azure-sentinel-solution-junipersrx","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","JuniperSRX","Juniper","[Deprecated] Juniper SRX","The [Juniper SRX](https://www.juniper.net/us/en/products-services/security/srx-series/) connector allows you to easily connect your Juniper SRX logs with Microsoft Sentinel. 
This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias JuniperSRX and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Juniper%20SRX/Parsers/JuniperSRX.txt), on the second line of the query, enter the hostname(s) of your JuniperSRX device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n 1. 
Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n 2. Select **Apply below configuration to my machines** and select the facilities and severities.\n 3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Configure and connect the Juniper SRX"", ""description"": ""1. Follow these instructions to configure the Juniper SRX to forward syslog: \n - [Traffic Logs (Security Policy Logs)](https://kb.juniper.net/InfoCenter/index?page=content&id=KB16509&actp=METADATA) \n - [System Logs](https://kb.juniper.net/InfoCenter/index?page=content&id=kb16502)\n2. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Juniper SRX"", ""description"": ""must be configured to export logs via Syslog""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Juniper%20SRX/Data%20Connectors/Connector_Syslog_JuniperSRX.json","true" -"","JuniperIDP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/JuniperIDP","azuresentinel","azure-sentinel-solution-juniperidp","2021-03-31","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"JuniperIDP_CL","JuniperIDP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/JuniperIDP","azuresentinel","azure-sentinel-solution-juniperidp","2021-03-31","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","JuniperIDP","Juniper","[Deprecated] Juniper IDP","The [Juniper](https://www.juniper.net/) IDP data connector provides 
the capability to ingest [Juniper IDP](https://www.juniper.net/documentation/us/en/software/junos/idp-policy/topics/topic-map/security-idp-overview.html) events into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on Kusto Function to work as expected [**JuniperIDP**](https://aka.ms/sentinel-JuniperIDP-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** IDP OS 5.1 and above is supported by this data connector."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": 
[{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Follow the configuration steps below to get Juniper IDP logs into Microsoft Sentinel. This configuration enriches events generated by Juniper IDP module to provide visibility on log source information for Juniper IDP logs. Refer to the [Azure Monitor Documentation](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-json) for more details on these steps.\n1. Download config file [juniper_idp.conf](https://aka.ms/sentinel-JuniperIDP-conf).\n2. Login to the server where you have installed Azure Log Analytics agent.\n3. Copy juniper_idp.conf to the /etc/opt/microsoft/omsagent/**workspace_id**/conf/omsagent.d/ folder.\n4. Edit juniper_idp.conf as follows:\n\n\t i. change the listen port for receiving logs based on your configuration (line 3)\n\n\t ii. replace **workspace_id** with real value of your Workspace ID (lines 58,59,60,63)\n5. Save changes and restart the Azure Log Analytics agent for Linux service with the following command:\n\t\tsudo /opt/microsoft/omsagent/bin/service_control restart\n6. 
To configure a remote syslog destination, please reference the [SRX Getting Started - Configure System Logging](https://kb.juniper.net/InfoCenter/index?page=content&id=kb16502)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/JuniperIDP/Data%20Connectors/Connector_LogAnalytics_agent_JuniperIDP.json","true" -"","KQL Training","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/KQL%20Training","microsoftsentinelcommunity","azure-sentinel-solution-kqltraining","2022-11-30","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","","","","","","","false","","false" -"","Keeper Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Keeper%20Security","keepersecurity","keeper-security-integration","2025-06-03","2025-06-03","","Keeper Security","Partner","https://www.keepersecurity.com","","domains","","","","","","","false","","false" -"KeeperSecurityEventNewLogs_CL","Keeper 
Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Keeper%20Security","keepersecurity","keeper-security-integration","2025-06-03","2025-06-03","","Keeper Security","Partner","https://www.keepersecurity.com","","domains","KeeperSecurityPush2","Keeper Security","Keeper Security Push Connector","The [Keeper Security](https://keepersecurity.com) connector provides the capability to read raw event data from Keeper Security in Microsoft Sentinel.","[{""title"": ""1. Create ARM Resources and Provide the Required Permissions"", ""description"": ""This connector reads data from the tables that Keeper Security uses in a Microsoft Analytics Workspace, if the [data forwarding](https://docs.keepersecurity.com/docs/data-forwarding) option is enabled in Keeper Security then raw event data is sent to the Microsoft Sentinel Ingestion API."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated Configuration and Secure Data Ingestion with Entra Application \nClicking on \""Deploy\"" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR). \nIt will then create an Entra application, link the DCR to it, and set the entered secret in the application. This setup enables data to be sent securely to the DCR using an Entra token.""}}, {""parameters"": {""label"": ""Keeper Security connector resources"", ""applicationDisplayName"": ""Keeper Security Connector Application""}, ""type"": ""DeployPushConnectorButton""}]}, {""title"": ""2. 
Push your logs into the workspace"", ""description"": ""Use the following parameters to configure your machine to send the logs to the workspace."", ""instructions"": [{""parameters"": {""label"": ""Tenant ID (Directory ID)"", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the App Registration Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the App Registration Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Endpoint Uri"", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the Data Collection Endpoint Uri""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Rule Immutable ID"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the Data Collection Rule Immutable ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Events Logs Stream Name"", ""value"": ""Custom-KeeperSecurityEventNewLogs""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Update Keeper Admin Console"", ""description"": ""Configure the Keeper Admin Console with the Azure connection details to enable data forwarding to Microsoft Sentinel."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configure Azure Monitor Logs in Keeper Admin Console\n\nIn the [Keeper Admin Console](https://keepersecurity.com/console/), login as the Keeper Administrator. 
Then go to **Reporting & Alerts** and select **Azure Monitor Logs**.\n\nProvide the following information from Step 2 above into the Admin Console:\n\n- **Azure Tenant ID**: You can find this from Azure's \""Subscriptions\"" area.\n- **Application (client) ID**: This is located in the App registration (KeeperLogging) overview screen\n- **Client Secret Value**: This is the Client Secret Value from the app registration secrets.\n- **Endpoint URL**: This is a URL that is created in the following specific format:\n `https:///dataCollectionRules//streams/?api-version=2023-01-01`\n\nTo assemble the Endpoint URL:\n\n- **** This comes from Step 2 above\n- **** From the Data Collector Rule, copy the \""Immutable Id\"" value, e.g. `dcr-xxxxxxx`\n- **
** This is the table name created by Azure, e.g. `Custom-KeeperSecurityEventNewLogs`\n\nExample: `https:///dataCollectionRules//streams/Custom-KeeperSecurityEventNewLogs?api-version=2023-01-01`""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rule (DCR). Typically requires Azure RBAC Owner or User Access Administrator role""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Keeper%20Security/Data%20Connectors/KeeperSecurity_ccp/KepperSecurity_Definition.json","true" -"","LastPass","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/LastPass","thecollectiveconsultingbv1584980370320","lastpass-enterprise-monitoring-solution","2021-10-20","2022-01-12","","The Collective Consulting","Partner","https://thecollective.eu","","domains","","","","","","","false","","false" -"LastPassNativePoller_CL","LastPass","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/LastPass","thecollectiveconsultingbv1584980370320","lastpass-enterprise-monitoring-solution","2021-10-20","2022-01-12","","The Collective Consulting","Partner","https://thecollective.eu","","domains","LastPass_Polling","The Collective Consulting BV","LastPass Enterprise - Reporting (Polling CCP)","The [LastPass Enterprise](https://www.lastpass.com/products/enterprise-password-management-and-sso) connector provides the capability to LastPass reporting (audit) logs into Microsoft Sentinel. 
The connector provides visibility into logins and activity within LastPass (such as reading and removing passwords).","[{""title"": ""Connect LastPass Enterprise to Microsoft Sentinel"", ""description"": ""Provide the LastPass Provisioning API Key."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""cid"", ""requestObjectKey"": ""queryParametersTemplate"", ""placeHolderName"": ""{{cidPlaceHolder}}""}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true, ""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""LastPass API Key and CID"", ""description"": ""A LastPass API key and CID are required. [See the documentation to learn more about LastPass API](https://support.logmeininc.com/lastpass/help/use-the-lastpass-provisioning-api-lp010068).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/LastPass/Data%20Connectors/LastPassAPIConnector.json","true" -"","Legacy IOC based Threat Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Legacy%20IOC%20based%20Threat%20Protection","azuresentinel","azure-sentinel-solution-ioclegacy","2022-12-19","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","Lookout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Lookout","lookoutinc","lookout_mtd_sentinel","2021-10-18","2025-11-07","3.0.1","Lookout","Partner","https://www.lookout.com/support","","domains","","","","","","","false","","false" 
-"Lookout_CL","Lookout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Lookout","lookoutinc","lookout_mtd_sentinel","2021-10-18","2025-11-07","3.0.1","Lookout","Partner","https://www.lookout.com/support","","domains","LookoutAPI","Lookout","[DEPRECATED] Lookout","The [Lookout](https://lookout.com) data connector provides the capability to ingest [Lookout](https://enterprise.support.lookout.com/hc/en-us/articles/115002741773-Mobile-Risk-API-Guide#commoneventfields) events into Microsoft Sentinel through the Mobile Risk API. Refer to [API documentation](https://enterprise.support.lookout.com/hc/en-us/articles/115002741773-Mobile-Risk-API-Guide) for more information. The [Lookout](https://lookout.com) data connector provides ability to get events which helps to examine potential security risks and more.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This [Lookout](https://lookout.com) data connector uses Azure Functions to connect to the Mobile Risk API to pull its events into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**LookoutEvents**](https://aka.ms/sentinel-lookoutapi-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Mobile Risk API**\n\n [Follow the instructions](https://enterprise.support.lookout.com/hc/en-us/articles/115002741773-Mobile-Risk-API-Guide#authenticatingwiththemobileriskapi) to obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Follow below mentioned instructions to deploy the [Lookout](https://lookout.com) data connector and the associated Azure Function**\n\n>**IMPORTANT:** Before starting the deployment of the [Lookout](https://lookout.com) data connector, make sure to have the Workspace ID and Workspace Key ready (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Workspace Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Follow below steps for automated deployment of the [Lookout](https://lookout.com) data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-lookoutapi-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. 
\n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **Function Name**, **Workspace ID**,**Workspace Key**,**Enterprise Name** & **Api Key** and deploy. \n4. Click **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Mobile Risk API Credentials/permissions"", ""description"": ""**EnterpriseName** & **ApiKey** are required for Mobile Risk API. [See the documentation to learn more about API](https://enterprise.support.lookout.com/hc/en-us/articles/115002741773-Mobile-Risk-API-Guide). 
Check all [requirements and follow the instructions](https://enterprise.support.lookout.com/hc/en-us/articles/115002741773-Mobile-Risk-API-Guide#authenticatingwiththemobileriskapi) for obtaining credentials.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Lookout/Data%20Connectors/Lookout_API_FunctionApp.json","true" -"LookoutMtdV2_CL","Lookout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Lookout","lookoutinc","lookout_mtd_sentinel","2021-10-18","2025-11-07","3.0.1","Lookout","Partner","https://www.lookout.com/support","","domains","LookoutStreaming_Definition","Microsoft","Lookout Mobile Threat Detection Connector (via Codeless Connector Framework) (Preview)","The [Lookout Mobile Threat Detection](https://lookout.com) data connector provides the capability to ingest events related to mobile security risks into Microsoft Sentinel through the Mobile Risk API. Refer to [API documentation](https://enterprise.support.lookout.com/hc/en-us/articles/115002741773-Mobile-Risk-API-Guide) for more information. This connector helps you examine potential security risks detected in mobile devices.","[{""title"": ""Connect Lookout Mobile Threat Defence connector to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""Before connecting to Lookout, ensure the following prerequisites are completed.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. **ApiKey** is required for Mobile Threat Detection API. See the [documentation](https://enterprise.support.lookout.com/hc/en-us/articles/115002741773-Mobile-Risk-API-Guide) to learn more about API. 
Check all requirements and follow the [instructions](https://enterprise.support.lookout.com/hc/en-us/articles/115002741773-Mobile-Risk-API-Guide#authenticatingwiththemobileriskapi) for obtaining credentials.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API key"", ""placeholder"": ""Enter your API key "", ""type"": ""password"", ""name"": ""applicationKey"", ""required"": true}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": false, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Lookout/Data%20Connectors/LookoutStreamingConnector_ccp/LookoutStreaming_DataConnectorDefinition.json","true" -"","Lookout Cloud Security Platform for Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Lookout%20Cloud%20Security%20Platform%20for%20Microsoft%20Sentinel","lookoutinc","lookout_cloudsecurity_sentinel","2023-02-17","","","Lookout","Partner","https://www.lookout.com/support","","domains","","","","","","","false","","false" -"LookoutCloudSecurity_CL","Lookout Cloud Security Platform for Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Lookout%20Cloud%20Security%20Platform%20for%20Microsoft%20Sentinel","lookoutinc","lookout_cloudsecurity_sentinel","2023-02-17","","","Lookout","Partner","https://www.lookout.com/support","","domains","LookoutCloudSecurityDataConnector","Lookout","Lookout Cloud Security for Microsoft Sentinel","This connector uses a Agari REST API connection to push data into Microsoft Sentinel Log Analytics.","[{""title"": """", ""description"": "">**NOTE:** This 
connector uses Azure Functions to connect to the Agari REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**Step-by-Step Instructions**\n\n As a prerequisite to this integration, first, you need to configure an API client on Lookout's Management Console. From the Management Console, you can add one or more clients and configure the appropriate permissions and actions for each. \n\n 1. Name - The name given to this client. \n\n 2. Client ID - the unique ID that was provided for this client. \n\n 3. Permissions - The permissions enabled for this client. The permissions you check are those that the client will be allowed to access. The listed options are Activity, Violation, Anomaly, Insights, and Profile \n\n 4. Service URL - The URL used to access this client. It must start with https:// \n\n 5. Authorized IPs - The valid IP address or addresses that apply to this client. \n\n 6. Actions - The actions you can take for this client. Click the icon for the action you want to perform. Editing client information, displaying the client secret, or deleting the client. \n\n **To add a new API client:** \n\n 1. Go to Administration > Enterprise Integration > API Clients and click New. \n\n 2. Enter a Name (required) and a Description (optional). \n\n 3. Enter the Client ID that was provided to you. \n\n 4. Select one or more Permissions from the dropdown list. \n\n 5. 
Enter one or more Authorized IP addresses for this client. Separate each address with a comma.\n\n 6. Click Save. \n\n When prompted, copy the string for the client's secret. You will need this information (along with the client ID) to authenticate to the API gateway. ""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Azure Blob Storage connection string and container name, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-LookoutCS-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Lookout Client ID**, **Lookout Client Secret**, **Lookout Base url**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key**\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. 
Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-Lookout-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions.\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tLookoutClientId\n\t\tLookoutApiSecret\n\t\tBaseurl\n\t\tWorkspaceID\n\t\tPrimaryKey\n\t\tlogAnalyticsUri (Optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. \n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Lookout%20Cloud%20Security%20Platform%20for%20Microsoft%20Sentinel/Data%20Connectors/LookoutCSConnector/LookoutCloudSecurityConnector_API_FunctionApp.json","true" -"","Lumen Defender Threat Feed","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Lumen%20Defender%20Threat%20Feed","centurylink","azure-sentinel-solution-lumen-defender-threat-feed","2025-09-12","2025-09-12","","Lumen Technologies, Inc.","Partner","https://www.lumen.com/en-us/contact-us/support.html","","domains","","","","","","","false","","false" -"ThreatIntelIndicators","Lumen Defender Threat Feed","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Lumen%20Defender%20Threat%20Feed","centurylink","azure-sentinel-solution-lumen-defender-threat-feed","2025-09-12","2025-09-12","","Lumen Technologies, Inc.","Partner","https://www.lumen.com/en-us/contact-us/support.html","","domains","LumenThreatFeedConnector","Lumen Technologies, Inc.","Lumen Defender Threat Feed Data Connector","The [Lumen Defender Threat Feed](https://bll-analytics.mss.lumen.com/analytics) connector provides the capability to ingest STIX-formatted threat intelligence indicators from Lumen's Black Lotus Labs research team into Microsoft Sentinel. The connector automatically downloads and uploads daily threat intelligence indicators including IPv4 addresses and domains to the ThreatIntelIndicators table via the STIX Objects Upload API.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions with Durable Functions to connect to the Lumen Defender Threat Feed API and upload threat intelligence indicators to Microsoft Sentinel via the STIX Objects API. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": ""Configuration"", ""description"": ""**STEP 1 - Obtain Lumen Defender Threat Feed API Key**\n\n1. [Contact Lumen](mailto:DefenderThreatFeedSales@Lumen.com?subject=API%20Access%20Request) to obtain API access to our Threat Feed API service\n2. Obtain your API key for authentication.""}, {""title"": """", ""description"": ""**STEP 2 - Configure Azure Entra ID Application and gather information**\n\n1. Create an Entra application. [See the documentation for a guide to registering an application in Microsoft Entra ID.](https://learn.microsoft.com/en-us/entra/identity-platform/quickstart-register-app)\n2. Create a client secret and note the Application ID, Tenant ID, and Client Secret\n3. Assign the **Microsoft Sentinel Contributor** role to the application on your Microsoft Sentinel Log Analytics Workspace\n4. Make note of your Workspace ID, as well as the App Insights Workspace Resource ID, which can be obtained from the overview page of the Log Analytics Workspace for your Microsoft Sentinel instance. Click on the \u201cJSON View\u201d link in the top right and the Resource ID will be displayed at the top with a copy button."", ""instructions"": [{""parameters"": {""fillWith"": [""TenantId""], ""label"": ""Tenant ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""**STEP 3 - Enable the Threat Intelligence Upload Indicators API (Preview) data connector in Microsoft Sentinel**\n\n1. Deploy the **Threat Intelligence (New) Solution**, which includes the **Threat Intelligence Upload Indicators API (Preview)**\n2. Browse to the Content Hub, find and select the **Threat Intelligence (NEW)** solution.\n3. 
Select the **Install/Update** button.""}, {""title"": """", ""description"": ""**STEP 4 - Deploy the Azure Function**\n\n**IMPORTANT:** Before deploying the Lumen Defender Threat Feed connector, have the Tenant ID, Workspace ID, App Insights Workspace Resource ID, Azure Entra application details (Client ID, Client Secret), and Lumen API key readily available.\n\n1. Click the Deploy to Azure button.\n\n[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FLumen%20Defender%20Threat%20Feed%2FData%2520Connectors%2FLumenThreatFeed%2Fazuredeploy_Connector_LumenThreatFeed_AzureFunction.json)\n\n2. Fill in the appropriate values for each parameter:\n\n- Subscription: Confirm the correct subscription is selected or use the dropdown to change your selection\n- Resource Group: Select the resource group to be used by the Function App and related resources\n- Function Name: Enter a globally unique name with an 11-character limit. Adhere to your organization\u2019s naming convention and ensure the name is globally unique since it is used (along with the uniqueString() function) to identify the ARM template being deployed.\n- Workspace ID: Found in the \""Overview\"" tab for the Log Analytics Workspace of the Microsoft Sentinel instance and provided for convenience on the connector information page.\n- Lumen API Key: Obtain an API key through Lumen support\n- Lumen Base URL: Filled in automatically and should generally not be changed. 
This URL contains API endpoints used by the connector\n- Tenant ID: Obtained from the Entra App Registration overview page for the registered application (listed as Directory ID) and can also be obtained from the Tenant Information page in Azure\n- Client ID: Obtained from the Entra App Registration overview page for the registered application (listed as Application ID)\n- Client Secret: Obtained when the secret is created during the app registration process. It can only be viewed when first created and is hidden permanently afterwards. Rerun the app registration process to obtain a new Client Secret if necessary.\n- App Insights Workspace Resource ID: Obtained from the overview page of the Log Analytics Workspace for your Microsoft Sentinel instance. Click on the \""JSON View\"" link in the top right and the Resource ID will be displayed at the top with a copy button.\n- Blob Container Name: Use the default name unless otherwise required. Azure Blob Storage is used for temporary storage and processing of threat indicators.""}, {""title"": """", ""description"": ""**STEP 5 - Verify Deployment**\n\n1. The connector polls for indicator updates every 15 minutes.\n2. Monitor the Function App logs in the Azure Portal to verify successful execution\n3. After the app performs its first run, review the indicators ingested by either viewing the \u201cLumen Defender Threat Feed Overview\u201d workbook or viewing the \u201cThreat Intelligence\u201d section in Microsoft Sentinel. 
In Microsoft Sentinel \u201cThreat Intelligence\u201d, filter for source \u201cLumen\u201d to display only Lumen generated indicators.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and write permissions on the Log Analytics workspace are required."", ""providerDisplayName"": ""Log Analytics Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": false}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Azure Entra App Registration"", ""description"": ""An Azure Entra application registration with the Microsoft Sentinel Contributor role assigned is required for STIX Objects API access. [See the documentation to learn more about Azure Entra applications](https://docs.microsoft.com/azure/active-directory/develop/quickstart-register-app).""}, {""name"": ""Microsoft Sentinel Contributor Role"", ""description"": ""Microsoft Sentinel Contributor role is required for the Azure Entra application to upload threat intelligence indicators.""}, {""name"": ""Lumen Defender Threat Feed API Key"", ""description"": ""A Lumen Defender Threat Feed API Key is required for accessing threat intelligence data. 
[Contact Lumen for API access](mailto:DefenderThreatFeedSales@Lumen.com?subject=API%20Access%20Request).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Lumen%20Defender%20Threat%20Feed/Data%20Connectors/LumenThreatFeed/LumenThreatFeedConnector_ConnectorUI.json","true" -"","MISP2Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MISP2Sentinel","microsoftsentinelcommunity","azure-sentinel-solution-misp2sentinel","2023-07-29","2023-07-29","","Community","Community","https://github.com/cudeso/misp2sentinel","","domains,verticals","","","","","","","false","","false" -"ThreatIntelligenceIndicator","MISP2Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MISP2Sentinel","microsoftsentinelcommunity","azure-sentinel-solution-misp2sentinel","2023-07-29","2023-07-29","","Community","Community","https://github.com/cudeso/misp2sentinel","","domains,verticals","MISP2SentinelConnector","MISP project & cudeso.be","MISP2Sentinel","This solution installs the MISP2Sentinel connector that allows you to automatically push threat indicators from MISP to Microsoft Sentinel via the Upload Indicators REST API. 
After installing the solution, configure and enable this data connector by following guidance in Manage solution view.","[{""title"": ""Installation and setup instructions"", ""description"": ""Use the documentation from this GitHub repository to install and configure the MISP to Microsoft Sentinel connector: \n\nhttps://github.com/cudeso/misp2sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.SecurityInsights/threatintelligence/write"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MISP2Sentinel/Data%20Connectors/MISP2SentinelConnector_UploadIndicatorsAPI.json","true" -"","MailGuard 365","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MailGuard%20365","mailguardptylimited","microsoft-sentinel-solution-mailguard365","2023-05-09","2023-06-08","","MailGuard 365","Partner","https://www.mailguard365.com/support/","","domains","","","","","","","false","","false" -"MailGuard365_Threats_CL","MailGuard 365","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MailGuard%20365","mailguardptylimited","microsoft-sentinel-solution-mailguard365","2023-05-09","2023-06-08","","MailGuard 365","Partner","https://www.mailguard365.com/support/","","domains","MailGuard365","MailGuard365","MailGuard 365","MailGuard 365 Enhanced Email Security for Microsoft 365. Exclusive to the Microsoft marketplace, MailGuard 365 is integrated with Microsoft 365 security (incl. Defender) for enhanced protection against advanced email threats like phishing, ransomware and sophisticated BEC attacks.","[{""title"": ""Configure and connect MailGuard 365"", ""description"": ""1. In the MailGuard 365 Console, click **Settings** on the navigation bar.\n2. Click the **Integrations** tab.\n3. 
Click the **Enable Microsoft Sentinel**.\n4. Enter your workspace id and primary key from the fields below, click **Finish**.\n5. For additional instructions, please contact MailGuard 365 support."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MailGuard%20365/Data%20Connectors/MailGuard365.json","true" -"","MailRisk","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MailRisk","securepracticeas1650887373770","microsoft-sentinel-solution-mailrisk","2023-03-16","2025-10-27","","Secure Practice","Partner","https://securepractice.co/support","","domains","","","","","","","false","","false" -"MailRiskEventEmails_CL","MailRisk","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MailRisk","securepracticeas1650887373770","microsoft-sentinel-solution-mailrisk","2023-03-16","2025-10-27","","Secure Practice","Partner","https://securepractice.co/support","","domains","SecurePracticeMailRiskConnector","Secure Practice","MailRisk by Secure Practice","The MailRisk 
by Secure Practice connector allows you to ingest email threat intelligence data from the MailRisk API into Microsoft Sentinel. This connector provides visibility into reported emails, risk assessments, and security events related to email threats.","[{""title"": ""1. Obtain Secure Practice API Credentials"", ""description"": ""Log in to your Secure Practice account and generate an API Key and API Secret if you haven't already.""}, {""title"": ""2. Connect to MailRisk API"", ""description"": ""Enter your Secure Practice API credentials below. The credentials will be securely stored and used to authenticate API requests."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""Enter your Secure Practice API Key"", ""type"": ""text"", ""name"": ""apiKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Secret"", ""placeholder"": ""Enter your Secure Practice API Secret"", ""type"": ""password"", ""name"": ""apiSecret""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""disconnectLabel"": ""Disconnect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": false, ""action"": false}}], ""customs"": [{""name"": ""API credentials"", ""description"": ""Your Secure Practice API key pair is also needed, which are created in the [settings in the admin portal](https://manage.securepractice.co/settings/security). 
Generate a new key pair with description `Microsoft Sentinel`.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MailRisk/Data%20Connectors/MailRisk_CCP/MailRisk_ConnectorDefinition.json","true" -"","Malware Protection Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Malware%20Protection%20Essentials","azuresentinel","azure-sentinel-solution-malwareprotection","2023-09-25","2023-09-25","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","MarkLogicAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MarkLogicAudit","azuresentinel","azure-sentinel-solution-marklogicaudit","2022-08-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"MarkLogicAudit_CL","MarkLogicAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MarkLogicAudit","azuresentinel","azure-sentinel-solution-marklogicaudit","2022-08-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MarkLogic","MarkLogic","[Deprecated] MarkLogic Audit","MarkLogic data connector provides the capability to ingest [MarkLogicAudit](https://www.marklogic.com/) logs into Microsoft Sentinel. Refer to [MarkLogic documentation](https://docs.marklogic.com/guide/getting-started) for more information.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias MarkLogicAudit and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MarkLogicAudit/Parsers/MarkLogicAudit.txt) on the second line of the query, enter the hostname(s) of your MarkLogicAudit device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Tomcat Server where the logs are generated.\n\n> Logs from MarkLogic Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", 
""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure MarkLogicAudit to enable auditing"", ""description"": ""Perform the following steps to enable auditing for a group:\n\n>Access the Admin Interface with a browser;\n\n>Open the Audit Configuration screen (Groups > group_name > Auditing);\n\n>Select True for the Audit Enabled radio button;\n\n>Configure any audit events and/or audit restrictions you want;\n\n>Click OK.\n\n Refer to the [MarkLogic documentation for more details](https://docs.marklogic.com/guide/admin/auditing)""}, {""title"": ""3. Configure the logs to be collected"", ""description"": ""Configure the custom log directory to be collected"", ""instructions"": [{""parameters"": {""linkType"": ""OpenCustomLogsSettings""}, ""type"": ""InstallAgent""}]}, {""title"": """", ""description"": ""1. Select the link above to open your workspace advanced settings \n2. From the left pane, select **Settings**, select **Custom Logs** and click **+Add custom log**\n3. Click **Browse** to upload a sample of a MarkLogicAudit log file. Then, click **Next >**\n4. Select **Timestamp** as the record delimiter and click **Next >**\n5. Select **Windows** or **Linux** and enter the path to MarkLogicAudit logs based on your configuration \n6. After entering the path, click the '+' symbol to apply, then click **Next >** \n7. 
Add **MarkLogicAudit** as the custom log Name (the '_CL' suffix will be added automatically) and click **Done**.""}, {""title"": ""Validate connectivity"", ""description"": ""It may take upwards of 20 minutes until your logs start to appear in Microsoft Sentinel.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MarkLogicAudit/Data%20Connectors/Connector_MarkLogicAudit.json","true" -"","MaturityModelForEventLogManagementM2131","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MaturityModelForEventLogManagementM2131","azuresentinel","azure-sentinel-solution-maturitymodelforeventlogma","2021-12-05","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","McAfee Network Security Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/McAfee%20Network%20Security%20Platform","azuresentinel","azure-sentinel-solution-mcafeensp","2021-06-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"Syslog","McAfee Network Security 
Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/McAfee%20Network%20Security%20Platform","azuresentinel","azure-sentinel-solution-mcafeensp","2021-06-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","McAfeeNSP","McAfee","[Deprecated] McAfee Network Security Platform","The [McAfee® Network Security Platform](https://www.mcafee.com/enterprise/en-us/products/network-security-platform.html) data connector provides the capability to ingest [McAfee® Network Security Platform events](https://docs.mcafee.com/bundle/network-security-platform-10.1.x-integration-guide-unmanaged/page/GUID-8C706BE9-6AC9-4641-8A53-8910B51207D8.html) into Microsoft Sentinel. Refer to [McAfee® Network Security Platform](https://docs.mcafee.com/bundle/network-security-platform-10.1.x-integration-guide-unmanaged/page/GUID-F7D281EC-1CC9-4962-A7A3-5A9D9584670E.html) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**McAfeeNSPEvent**](https://aka.ms/sentinel-mcafeensp-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using McAfee\u00ae Network Security Platform version: 10.1.x"", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server where the McAfee\u00ae Network Security Platform logs are forwarded.\n\n> Logs from McAfee\u00ae Network Security Platform Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure McAfee\u00ae Network Security Platform event forwarding"", ""description"": ""Follow the configuration steps below to get McAfee\u00ae Network Security Platform logs into Microsoft Sentinel.\n1. 
[Follow these instructions](https://docs.mcafee.com/bundle/network-security-platform-10.1.x-product-guide/page/GUID-E4A687B0-FAFB-4170-AC94-1D968A10380F.html) to forward alerts from the Manager to a syslog server.\n2. Add a syslog notification profile, [more details here](https://docs.mcafee.com/bundle/network-security-platform-10.1.x-product-guide/page/GUID-5BADD5D7-21AE-4E3B-AEE2-A079F3FD6A38.html). This is mandatory. While creating profile, to make sure that events are formatted correctly, enter the following text in the Message text box:\n\t\t:|SENSOR_ALERT_UUID|ALERT_TYPE|ATTACK_TIME|ATTACK_NAME|ATTACK_ID\n\t\t|ATTACK_SEVERITY|ATTACK_SIGNATURE|ATTACK_CONFIDENCE|ADMIN_DOMAIN|SENSOR_NAME|INTERFACE\n\t\t|SOURCE_IP|SOURCE_PORT|DESTINATION_IP|DESTINATION_PORT|CATEGORY|SUB_CATEGORY\n\t\t|DIRECTION|RESULT_STATUS|DETECTION_MECHANISM|APPLICATION_PROTOCOL|NETWORK_PROTOCOL|""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/McAfee%20Network%20Security%20Platform/Data%20Connectors/McAfeeNSP.json","true" -"","McAfee ePolicy Orchestrator","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/McAfee%20ePolicy%20Orchestrator","azuresentinel","azure-sentinel-solution-mcafeeepo","2021-03-25","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"Syslog","McAfee ePolicy Orchestrator","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/McAfee%20ePolicy%20Orchestrator","azuresentinel","azure-sentinel-solution-mcafeeepo","2021-03-25","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","McAfeeePO","McAfee","[Deprecated] McAfee ePolicy Orchestrator 
(ePO)","The McAfee ePolicy Orchestrator data connector provides the capability to ingest [McAfee ePO](https://www.mcafee.com/enterprise/en-us/products/epolicy-orchestrator.html) events into Microsoft Sentinel through the syslog. Refer to [documentation](https://docs.mcafee.com/bundle/epolicy-orchestrator-landing/page/GUID-0C40020F-5B7F-4549-B9CC-0E017BC8797F.html) for more information.","[{""title"": """", ""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected [**McAfeeEPOEvent**](https://aka.ms/sentinel-McAfeeePO-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n2. Select **Apply below configuration to my machines** and select the facilities and severities.\n3. 
Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Configure McAfee ePolicy Orchestrator event forwarding to Syslog server"", ""description"": ""[Follow these instructions](https://kcm.trellix.com/corporate/index?page=content&id=KB87927) to add register syslog server.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/McAfee%20ePolicy%20Orchestrator/Data%20Connectors/Connector_McAfee_ePO.json","true" -"","Microsoft 365","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20365","azuresentinel","azure-sentinel-solution-office365","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"OfficeActivity","Microsoft 365","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20365","azuresentinel","azure-sentinel-solution-office365","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","Office365","Microsoft","Microsoft 365 (formerly, Office 365)","The Microsoft 365 (formerly, Office 365) activity log connector provides insight into ongoing user activities. You will get details of operations such as file downloads, access requests sent, changes to group events, set-mailbox and details of the user who performed the actions. By connecting Microsoft 365 logs into Microsoft Sentinel you can use this data to view dashboards, create custom alerts, and improve your investigation process. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219943&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect Office 365 activity logs to your Microsoft Sentinel."", ""description"": ""Select the record types you want to collect from your tenant and click **Apply Changes**."", ""instructions"": [{""type"": ""OfficeDataTypes"", ""parameters"": {""connectorKind"": ""Office365"", ""dataTypes"": [{""title"": ""Exchange"", ""name"": ""exchange""}, {""title"": ""SharePoint"", ""name"": ""sharePoint""}, {""title"": ""Teams"", ""name"": ""teams""}]}}]}, {""title"": ""2. Previously connected tenants"", ""description"": ""Microsoft Sentinel now enables Office 365 single-tenant connection. You can modify your previously connected tenants and click **Save**."", ""instructions"": [{""type"": ""Office365""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""tenant"": [""GlobalAdmin"", ""SecurityAdmin""]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20365/Data%20Connectors/Microsoft365.JSON","true" -"exchange","Microsoft 365","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20365","azuresentinel","azure-sentinel-solution-office365","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","Office365","Microsoft","Microsoft 365 (formerly, Office 365)","The Microsoft 365 (formerly, Office 365) activity log connector provides insight into ongoing user activities. You will get details of operations such as file downloads, access requests sent, changes to group events, set-mailbox and details of the user who performed the actions. 
By connecting Microsoft 365 logs into Microsoft Sentinel you can use this data to view dashboards, create custom alerts, and improve your investigation process. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219943&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect Office 365 activity logs to your Microsoft Sentinel."", ""description"": ""Select the record types you want to collect from your tenant and click **Apply Changes**."", ""instructions"": [{""type"": ""OfficeDataTypes"", ""parameters"": {""connectorKind"": ""Office365"", ""dataTypes"": [{""title"": ""Exchange"", ""name"": ""exchange""}, {""title"": ""SharePoint"", ""name"": ""sharePoint""}, {""title"": ""Teams"", ""name"": ""teams""}]}}]}, {""title"": ""2. Previously connected tenants"", ""description"": ""Microsoft Sentinel now enables Office 365 single-tenant connection. You can modify your previously connected tenants and click **Save**."", ""instructions"": [{""type"": ""Office365""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""tenant"": [""GlobalAdmin"", ""SecurityAdmin""]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20365/Data%20Connectors/Microsoft365.JSON","true" -"sharePoint","Microsoft 365","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20365","azuresentinel","azure-sentinel-solution-office365","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","Office365","Microsoft","Microsoft 365 (formerly, Office 365)","The Microsoft 365 (formerly, Office 365) activity log connector provides insight into ongoing user activities. 
You will get details of operations such as file downloads, access requests sent, changes to group events, set-mailbox and details of the user who performed the actions. By connecting Microsoft 365 logs into Microsoft Sentinel you can use this data to view dashboards, create custom alerts, and improve your investigation process. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219943&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect Office 365 activity logs to your Microsoft Sentinel."", ""description"": ""Select the record types you want to collect from your tenant and click **Apply Changes**."", ""instructions"": [{""type"": ""OfficeDataTypes"", ""parameters"": {""connectorKind"": ""Office365"", ""dataTypes"": [{""title"": ""Exchange"", ""name"": ""exchange""}, {""title"": ""SharePoint"", ""name"": ""sharePoint""}, {""title"": ""Teams"", ""name"": ""teams""}]}}]}, {""title"": ""2. Previously connected tenants"", ""description"": ""Microsoft Sentinel now enables Office 365 single-tenant connection. 
You can modify your previously connected tenants and click **Save**."", ""instructions"": [{""type"": ""Office365""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""tenant"": [""GlobalAdmin"", ""SecurityAdmin""]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20365/Data%20Connectors/Microsoft365.JSON","true" -"","Microsoft 365 Assets","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20365%20Assets","azuresentinel","azure-sentinel-solution-m365asset","2025-06-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","Microsoft 365 Assets","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20365%20Assets","azuresentinel","azure-sentinel-solution-m365asset","2025-06-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","M365Assets","Microsoft","Microsoft 365 Assets (formerly, Office 365)","The Microsoft 365 (formerly, Office 365) asset connector gives richer insights into ongoing user activities in Microsoft Sentinel by supplementing activity logs with details such as owners, permissions, retention policies and sensitivity labels.

Data from this connector is used to build data risk graphs in Purview. If you've enabled those graphs, deactivating this connector will prevent the graphs from being built. [Learn about the data risk graph](https://go.microsoft.com/fwlink/?linkid=2320023).

This connector is in limited private preview.","[{""instructions"": [{""parameters"": {""type"": ""M365Assets"", ""description"": ""1.Connect Microsoft 365 assets (formerly, Office 365) to Microsoft Sentinel."", ""items"": [{""label"": ""SharePoint and OneDrive""}]}, ""type"": ""MSG""}]}]","{""tenant"": [""GlobalAdmin"", ""SecurityAdmin""], ""customs"": [{""name"": ""OfficeActivity table availability"", ""description"": ""Enable the OfficeActivity table in Log Analytics.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20365%20Assets/Data%20Connectors/M365Asset_DataConnectorDefinition.json","true" -"","Microsoft Business Applications","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Business%20Applications","sentinel4dynamics365","powerplatform","2023-04-19","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"FinanceOperationsActivity_CL","Microsoft Business Applications","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Business%20Applications","sentinel4dynamics365","powerplatform","2023-04-19","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Dynamics365Finance","Microsoft","Dynamics 365 Finance and Operations","Dynamics 365 for Finance and Operations is a comprehensive Enterprise Resource Planning (ERP) solution that combines financial and operational capabilities to help businesses manage their day-to-day operations. It offers a range of features that enable businesses to streamline workflows, automate tasks, and gain insights into operational performance.

The Dynamics 365 Finance and Operations data connector ingests Dynamics 365 Finance and Operations admin activities and audit logs as well as user business process and application activities logs into Microsoft Sentinel.","[{""description"": "">Connectivity to Finance and Operations requires a Microsoft Entra app registration (client ID and secret). You'll also need the Microsoft Entra tenant ID and the Finance Operations Organization URL.""}, {""description"": ""To enable data collection, create a role in Dynamics 365 Finance and Operations with permissions to view the Database Log entity. Assign this role to a dedicated Finance and Operations user, mapped to the client ID of a Microsoft Entra app registration. Follow these steps to complete the process:""}, {""title"": ""Step 1 - Microsoft Entra app registration"", ""description"": ""1. Navigate to the [Microsoft Entra portal](https://entra.microsoft.com). \n2. Under Applications, click on **App Registrations** and create a new app registration (leave all defaults).\n3. Open the new app registration and create a new secret.\n4. Retain the **Tenant ID**, **Application (client) ID**, and **Client secret** for later use.""}, {""title"": ""Step 2 - Create a role for data collection in Finance and Operations"", ""description"": ""1. In the Finance and Operations portal, navigate to **Workspaces > System administration** and click **Security Configuration**\n2. Under **Roles** click **Create new** and give the new role a name e.g. Database Log Viewer.\n3. Select the new role in the list of roles and click **Privileges** and than **Add references**.\n4. Select **Database log Entity View** from the list of privileges.\n5. Click on **Unpublished objects** and then **Publish all** to publish the role.""}, {""title"": ""Step 3 - Create a user for data collection in Finance and Operations"", ""description"": ""1. In the Finance and Operations portal, navigate to **Modules > System administration** and click **Users**\n2. 
Create a new user and assign the role created in the previous step to the user.""}, {""title"": ""Step 4 - Register the Microsoft Entra app in Finance and Operations"", ""description"": ""1. In the F&O portal, navigate to **System administration > Setup > Microsoft Entra applications** (Azure Active Directory applications)\n2. Create a new entry in the table. In the **Client Id** field, enter the application ID of the app registered in Step 1.\n3. In the **Name** field, enter a name for the application.\n4. In the **User ID** field, select the user ID created in the previous step.""}, {""description"": ""Connect using client credentials"", ""title"": ""Connect events from Dyanmics 365 Finance and Operations to Microsoft Sentinel"", ""instructions"": [{""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""label"": ""Add environment"", ""isPrimary"": true, ""title"": ""Dynamics 365 Finance and Operations connection"", ""instructionSteps"": [{""title"": ""Environment details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Microsoft Entra tenant ID."", ""placeholder"": ""Tenant ID (GUID)"", ""type"": ""text"", ""name"": ""tenantId""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""App registration client ID"", ""placeholder"": ""Finance and Operations client ID"", ""type"": ""text"", ""name"": ""clientId""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""App registration client secret"", ""placeholder"": ""Finance and Operations client secret"", ""type"": ""password"", ""name"": ""clientSecret""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Finance and Operations organization URL"", ""placeholder"": ""https://dynamics-dev.axcloud.dynamics.com"", ""type"": ""text"", ""name"": ""auditHost""}}]}]}}]}, {""title"": ""Organizations"", ""description"": ""Each row represents an Finance and Operations connection"", ""instructions"": [{""type"": ""DataConnectorsGrid"", 
""parameters"": {""mapping"": [{""columnName"": ""Environment URL"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft Entra app registration"", ""description"": ""Application client ID and secret used to access Dynamics 365 Finance and Operations.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Business%20Applications/Data%20Connectors/DynamicsFinOpsPollerConnector/DynamicsFinOps_DataConnectorDefinition.json","true" -"","Microsoft Copilot","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Copilot","azuresentinel","azure-sentinel-solution-microsoftcopilot","2025-10-01","","","Microsoft","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"CopilotActivity","Microsoft Copilot","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Copilot","azuresentinel","azure-sentinel-solution-microsoftcopilot","2025-10-01","","","Microsoft","Microsoft","https://support.microsoft.com","","domains","MicrosoftCopilot","Microsoft","Microsoft Copilot","The Microsoft Copilot logs connector in Microsoft Sentinel enables the seamless ingestion of 
Copilot-generated activity logs into Microsoft Sentinel for advanced threat detection, investigation, and response. It collects telemetry from Microsoft Copilot services - such as usage data, prompts and system responses - and ingests into Microsoft Sentinel, allowing security teams to monitor for misuse, detect anomalies, and maintain compliance with organizational policies.","[{""title"": ""Connect Microsoft Copilot audit logs to Microsoft Sentinel"", ""description"": ""This connector uses the Office Management API to get your Microsoft Copilot audit logs. The logs will be stored and processed in your existing Microsoft Sentinel workspace. You can find the data in the **CopilotActivity** table."", ""instructions"": [{""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Tenant Permissions"", ""description"": ""'Security Administrator' or 'Global Administrator' on the workspace's tenant.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Copilot/Data%20Connectors/MicrosoftCopilot_ConnectorDefinition.json","true" -"","Microsoft Defender For Identity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20For%20Identity","azuresentinel","azure-sentinel-solution-mdefenderforidentity","2022-04-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"SecurityAlert","Microsoft Defender For 
Identity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20For%20Identity","azuresentinel","azure-sentinel-solution-mdefenderforidentity","2022-04-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureAdvancedThreatProtection","Microsoft","Microsoft Defender for Identity","Connect Microsoft Defender for Identity to gain visibility into the events and user analytics. Microsoft Defender for Identity identifies, detects, and helps you investigate advanced threats, compromised identities, and malicious insider actions directed at your organization. Microsoft Defender for Identity enables SecOp analysts and security professionals struggling to detect advanced attacks in hybrid environments to:

- Monitor users, entity behavior, and activities with learning-based analytics​
- Protect user identities and credentials stored in Active Directory
- Identify and investigate suspicious user activities and advanced attacks throughout the kill chain
- Provide clear incident information on a simple timeline for fast triage

[Try now >](https://aka.ms/AtpTryNow)

[Deploy now >](https://aka.ms/AzureATP_Deploy)

For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2220069&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect Microsoft Defender for Identity to Microsoft Sentinel"", ""description"": ""If your tenant is running [Microsoft Defender for Identity](https://aka.ms/Sentinel/MDI/Preview) in Microsoft Defender for Cloud Apps, connect here to stream your Microsoft Defender for Identity alerts into Microsoft Sentinel\n\n> In order to integrate with Microsoft Defender for Identity alerts, use **global administrator**, or **security administrator** permission."", ""instructions"": [{""parameters"": {""title"": ""Microsoft Defender for Identity"", ""connectorKind"": ""AzureAdvancedThreatProtection"", ""enable"": true, ""consentText"": ""Yes, I have connected Microsoft Defender for Identity to Microsoft Defender for Cloud Apps""}, ""type"": ""SentinelResourceProvider""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""tenant"": [""SecurityAdmin"", ""GlobalAdmin""], ""licenses"": [""Aatp""]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20For%20Identity/Data%20Connectors/MicrosoftDefenderforIdentity.JSON","true" -"","Microsoft Defender Threat Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20Threat%20Intelligence","azuresentinel","azure-sentinel-solution-microsoftdefenderthreatint","2023-03-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","Microsoft Defender 
XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR","azuresentinel","azure-sentinel-solution-microsoft365defender","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"AlertEvidence","Microsoft Defender XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR","azuresentinel","azure-sentinel-solution-microsoft365defender","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftThreatProtection","Microsoft","Microsoft Defender XDR","Microsoft Defender XDR is a unified, natively integrated, pre- and post-breach enterprise defense suite that protects endpoint, identity, email, and applications and helps you detect, prevent, investigate, and automatically respond to sophisticated threats.

Microsoft Defender XDR suite includes:
- Microsoft Defender for Endpoint
- Microsoft Defender for Identity
- Microsoft Defender for Office 365
- Threat & Vulnerability Management
- Microsoft Defender for Cloud Apps

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220004&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect incidents & alerts"", ""description"": ""Connect Microsoft Defender XDR incidents to your Microsoft Sentinel. Incidents will appear in the incidents queue."", ""isComingSoon"": false, ""bottomBorder"": true, ""instructions"": [{""parameters"": {}, ""type"": ""MicrosoftThreatProtection""}]}, {""title"": ""Connect events"", ""instructions"": [{""parameters"": {}, ""type"": ""MicrosoftDefenderATPEvents""}]}]","{""tenant"": [""GlobalAdmin"", ""SecurityAdmin""], ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""License"", ""description"": ""M365 E5, M365 A5 or any other Microsoft Defender XDR eligible license.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR/Data%20Connectors/MicrosoftThreatProtection.JSON","true" -"SecurityAlert","Microsoft Defender XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR","azuresentinel","azure-sentinel-solution-microsoft365defender","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftThreatProtection","Microsoft","Microsoft Defender XDR","Microsoft Defender XDR is a unified, natively integrated, pre- and post-breach enterprise defense suite that protects endpoint, identity, email, and applications and helps you detect, prevent, investigate, and automatically respond to sophisticated threats.

Microsoft Defender XDR suite includes:
- Microsoft Defender for Endpoint
- Microsoft Defender for Identity
- Microsoft Defender for Office 365
- Threat & Vulnerability Management
- Microsoft Defender for Cloud Apps

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220004&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect incidents & alerts"", ""description"": ""Connect Microsoft Defender XDR incidents to your Microsoft Sentinel. Incidents will appear in the incidents queue."", ""isComingSoon"": false, ""bottomBorder"": true, ""instructions"": [{""parameters"": {}, ""type"": ""MicrosoftThreatProtection""}]}, {""title"": ""Connect events"", ""instructions"": [{""parameters"": {}, ""type"": ""MicrosoftDefenderATPEvents""}]}]","{""tenant"": [""GlobalAdmin"", ""SecurityAdmin""], ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""License"", ""description"": ""M365 E5, M365 A5 or any other Microsoft Defender XDR eligible license.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR/Data%20Connectors/MicrosoftThreatProtection.JSON","true" -"SecurityIncident","Microsoft Defender XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR","azuresentinel","azure-sentinel-solution-microsoft365defender","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftThreatProtection","Microsoft","Microsoft Defender XDR","Microsoft Defender XDR is a unified, natively integrated, pre- and post-breach enterprise defense suite that protects endpoint, identity, email, and applications and helps you detect, prevent, investigate, and automatically respond to sophisticated threats.

Microsoft Defender XDR suite includes:
- Microsoft Defender for Endpoint
- Microsoft Defender for Identity
- Microsoft Defender for Office 365
- Threat & Vulnerability Management
- Microsoft Defender for Cloud Apps

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220004&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect incidents & alerts"", ""description"": ""Connect Microsoft Defender XDR incidents to your Microsoft Sentinel. Incidents will appear in the incidents queue."", ""isComingSoon"": false, ""bottomBorder"": true, ""instructions"": [{""parameters"": {}, ""type"": ""MicrosoftThreatProtection""}]}, {""title"": ""Connect events"", ""instructions"": [{""parameters"": {}, ""type"": ""MicrosoftDefenderATPEvents""}]}]","{""tenant"": [""GlobalAdmin"", ""SecurityAdmin""], ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""License"", ""description"": ""M365 E5, M365 A5 or any other Microsoft Defender XDR eligible license.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR/Data%20Connectors/MicrosoftThreatProtection.JSON","true" -"","Microsoft Defender for Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Cloud","azuresentinel","azure-sentinel-solution-microsoftdefenderforcloud","2022-05-17","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"SecurityAlert","Microsoft Defender for Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Cloud","azuresentinel","azure-sentinel-solution-microsoftdefenderforcloud","2022-05-17","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AzureSecurityCenter","Microsoft","Subscription-based Microsoft Defender for Cloud (Legacy)","Microsoft 
Defender for Cloud is a security management tool that allows you to detect and quickly respond to threats across Azure, hybrid, and multi-cloud workloads. This connector allows you to stream your security alerts from Microsoft Defender for Cloud into Microsoft Sentinel, so you can view Defender data in workbooks, query it to produce alerts, and investigate and respond to incidents.

[For more information>](https://aka.ms/ASC-Connector)","[{""title"": ""Connect Microsoft Defender for Cloud to Microsoft Sentinel"", ""description"": ""Mark the check box of each Azure subscription whose alerts you want to import into Microsoft Sentinel, then select **Connect** above the list.\n\n> The connector can be enabled only on subscriptions that have at least one Microsoft Defender plan enabled in Microsoft Defender for Cloud, and only by users with Security Reader permissions on the subscription."", ""instructions"": [{""parameters"": {}, ""type"": ""AzureSecurityCenterSubscriptions""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""License"", ""description"": ""The connector is available for all deployments of Microsoft Defender for Cloud.""}, {""name"": ""Subscription"", ""description"": ""[read security data](https://docs.microsoft.com/azure/security-center/security-center-permissions).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Cloud/Data%20Connectors/AzureSecurityCenter.JSON","true" -"SecurityAlert","Microsoft Defender for Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Cloud","azuresentinel","azure-sentinel-solution-microsoftdefenderforcloud","2022-05-17","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftDefenderForCloudTenantBased","Microsoft","Tenant-based Microsoft Defender for Cloud","Microsoft Defender for Cloud is a security management tool that allows you to detect and quickly respond to threats across Azure, hybrid, and multi-cloud workloads. 
This connector allows you to stream your MDC security alerts from Microsoft 365 Defender into Microsoft Sentinel, so you can leverage the advantages of XDR correlations connecting the dots across your cloud resources, devices and identities and view the data in workbooks, queries and investigate and respond to incidents. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2269832&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect Tenant-based Microsoft Defender for Cloud to Microsoft Sentinel"", ""description"": ""After connecting this connector, **all** your Microsoft Defender for Cloud subscriptions' alerts will be sent to this Microsoft Sentinel workspace.\n\n> Your Microsoft Defender for Cloud alerts are connected to stream through the Microsoft 365 Defender. To benefit from automated grouping of the alerts into incidents, connect the Microsoft 365 Defender incidents connector. Incidents can be viewed in the incidents queue."", ""instructions"": [{""parameters"": {""title"": ""Tenant-based Microsoft Defender for Cloud"", ""connectorKind"": ""MicrosoftDefenderForCloudTenantBased"", ""enable"": true, ""newPipelineEnabledFeatureFlagConfig"": {""feature"": ""MdcAlertsByMtp"", ""featureStates"": {""1"": 2, ""2"": 2, ""3"": 2, ""4"": 2, ""5"": 2}}, ""infoBoxMessage"": ""Your Microsoft Defender for Cloud alerts are connected to stream through the Microsoft 365 Defender. To benefit from automated grouping of the alerts into incidents, connect the Microsoft 365 Defender incidents connector. 
Incidents can be viewed in the incidents queue"", ""shouldAlwaysDisplayInfoMessage"": true}, ""type"": ""MicrosoftDefenderForCloudTenantBased""}]}]","{""tenant"": [""SecurityAdmin"", ""GlobalAdmin""], ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""tenantMember"": true}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Cloud/Data%20Connectors/MicrosoftDefenderForCloudTenantBased.json","true" -"","Microsoft Defender for Cloud Apps","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Cloud%20Apps","azuresentinel","azure-sentinel-solution-microsoftdefendercloudapps","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"McasShadowItReporting","Microsoft Defender for Cloud Apps","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Cloud%20Apps","azuresentinel","azure-sentinel-solution-microsoftdefendercloudapps","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftCloudAppSecurity","Microsoft","Microsoft Defender for Cloud Apps","By connecting with [Microsoft Defender for Cloud Apps](https://aka.ms/asi-mcas-connector-description) you will gain visibility into your cloud apps, get sophisticated analytics to identify and combat cyberthreats, and control how your data travels.

- Identify shadow IT cloud apps on your network.
- Control and limit access based on conditions and session context.
- Use built-in or custom policies for data sharing and data loss prevention.
- Identify high-risk use and get alerts for unusual user activities with Microsoft behavioral analytics and anomaly detection capabilities, including ransomware activity, impossible travel, suspicious email forwarding rules, and mass download of files.
- Mass download of files

[Deploy now >](https://aka.ms/asi-mcas-connector-deploynow)","[{""title"": ""Connect Microsoft Defender for Cloud Apps to Microsoft Sentinel"", ""description"": ""In the Microsoft Defender for Cloud Apps portal, under Settings, select Security extensions and then SIEM and set Microsoft Sentinel as your SIEM agent. For more information, see [Microsoft Defender for Cloud Apps](https://aka.ms/azuresentinelmcas) .\n\nAfter you connect Microsoft Defender for Cloud Apps, the alerts and discovery logs are sent to this Microsoft Sentinel workspace.\u200b"", ""instructions"": [{""parameters"": {""connectorKind"": ""MicrosoftCloudAppSecurity"", ""dataTypes"": [{""title"": ""Alerts"", ""name"": ""alerts""}, {""title"": ""Cloud Discovery Logs (Preview)"", ""name"": ""discoveryLogs""}]}, ""type"": ""MCasDataTypes""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""tenant"": [""GlobalAdmin"", ""SecurityAdmin""], ""licenses"": [""Mcas""]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Cloud%20Apps/Data%20Connectors/MicrosoftCloudAppSecurity.JSON","true" -"SecurityAlert","Microsoft Defender for Cloud Apps","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Cloud%20Apps","azuresentinel","azure-sentinel-solution-microsoftdefendercloudapps","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftCloudAppSecurity","Microsoft","Microsoft Defender for Cloud Apps","By connecting with [Microsoft Defender for Cloud Apps](https://aka.ms/asi-mcas-connector-description) you will gain visibility into your cloud apps, get sophisticated analytics to identify and combat cyberthreats, and control how your 
data travels.

- Identify shadow IT cloud apps on your network.
- Control and limit access based on conditions and session context.
- Use built-in or custom policies for data sharing and data loss prevention.
- Identify high-risk use and get alerts for unusual user activities with Microsoft behavioral analytics and anomaly detection capabilities, including ransomware activity, impossible travel, suspicious email forwarding rules, and mass download of files.
- Mass download of files

[Deploy now >](https://aka.ms/asi-mcas-connector-deploynow)","[{""title"": ""Connect Microsoft Defender for Cloud Apps to Microsoft Sentinel"", ""description"": ""In the Microsoft Defender for Cloud Apps portal, under Settings, select Security extensions and then SIEM and set Microsoft Sentinel as your SIEM agent. For more information, see [Microsoft Defender for Cloud Apps](https://aka.ms/azuresentinelmcas) .\n\nAfter you connect Microsoft Defender for Cloud Apps, the alerts and discovery logs are sent to this Microsoft Sentinel workspace.\u200b"", ""instructions"": [{""parameters"": {""connectorKind"": ""MicrosoftCloudAppSecurity"", ""dataTypes"": [{""title"": ""Alerts"", ""name"": ""alerts""}, {""title"": ""Cloud Discovery Logs (Preview)"", ""name"": ""discoveryLogs""}]}, ""type"": ""MCasDataTypes""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""tenant"": [""GlobalAdmin"", ""SecurityAdmin""], ""licenses"": [""Mcas""]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Cloud%20Apps/Data%20Connectors/MicrosoftCloudAppSecurity.JSON","true" -"discoveryLogs","Microsoft Defender for Cloud Apps","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Cloud%20Apps","azuresentinel","azure-sentinel-solution-microsoftdefendercloudapps","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftCloudAppSecurity","Microsoft","Microsoft Defender for Cloud Apps","By connecting with [Microsoft Defender for Cloud Apps](https://aka.ms/asi-mcas-connector-description) you will gain visibility into your cloud apps, get sophisticated analytics to identify and combat cyberthreats, and control how your 
data travels.

- Identify shadow IT cloud apps on your network.
- Control and limit access based on conditions and session context.
- Use built-in or custom policies for data sharing and data loss prevention.
- Identify high-risk use and get alerts for unusual user activities with Microsoft behavioral analytics and anomaly detection capabilities, including ransomware activity, impossible travel, suspicious email forwarding rules, and mass download of files.
- Mass download of files

[Deploy now >](https://aka.ms/asi-mcas-connector-deploynow)","[{""title"": ""Connect Microsoft Defender for Cloud Apps to Microsoft Sentinel"", ""description"": ""In the Microsoft Defender for Cloud Apps portal, under Settings, select Security extensions and then SIEM and set Microsoft Sentinel as your SIEM agent. For more information, see [Microsoft Defender for Cloud Apps](https://aka.ms/azuresentinelmcas) .\n\nAfter you connect Microsoft Defender for Cloud Apps, the alerts and discovery logs are sent to this Microsoft Sentinel workspace.\u200b"", ""instructions"": [{""parameters"": {""connectorKind"": ""MicrosoftCloudAppSecurity"", ""dataTypes"": [{""title"": ""Alerts"", ""name"": ""alerts""}, {""title"": ""Cloud Discovery Logs (Preview)"", ""name"": ""discoveryLogs""}]}, ""type"": ""MCasDataTypes""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""tenant"": [""GlobalAdmin"", ""SecurityAdmin""], ""licenses"": [""Mcas""]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Cloud%20Apps/Data%20Connectors/MicrosoftCloudAppSecurity.JSON","true" -"","Microsoft Defender for Office 365","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Office%20365","azuresentinel","azure-sentinel-solution-microsoftdefenderforo365","2022-05-17","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"SecurityAlert","Microsoft Defender for Office 365","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Office%20365","azuresentinel","azure-sentinel-solution-microsoftdefenderforo365","2022-05-17","","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com/","","domains","OfficeATP","Microsoft","Microsoft Defender for Office 365 (Preview)","Microsoft Defender for Office 365 safeguards your organization against malicious threats posed by email messages, links (URLs) and collaboration tools. By ingesting Microsoft Defender for Office 365 alerts into Microsoft Sentinel, you can incorporate information about email- and URL-based threats into your broader risk analysis and build response scenarios accordingly.

The following types of alerts will be imported:

- A potentially malicious URL click was detected
- Email messages containing malware removed after delivery
- Email messages containing phish URLs removed after delivery
- Email reported by user as malware or phish
- Suspicious email sending patterns detected
- User restricted from sending email

These alerts can be seen by Office customers in the **Office Security and Compliance Center**.

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219942&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect Microsoft Defender for Office 365 alerts to Microsoft Sentinel"", ""description"": ""Connecting Microsoft Defender for Office 365 will cause your data that is collected by Microsoft Defender for Office 365 service to be stored and processed in the location that you have configured your Microsoft Sentinel workspace."", ""instructions"": [{""parameters"": {""connectorKind"": ""OfficeATP"", ""title"": ""Microsoft Defender for Office 365"", ""enable"": true}, ""type"": ""SentinelResourceProvider""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""tenant"": [""GlobalAdmin"", ""SecurityAdmin""], ""licenses"": [""OfficeATP""]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Office%20365/Data%20Connectors/template_OfficeATP.json","true" -"","Microsoft Entra ID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID","azuresentinel","azure-sentinel-solution-azureactivedirectory","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"AADRiskyServicePrincipals","Microsoft Entra ID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID","azuresentinel","azure-sentinel-solution-azureactivedirectory","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureActiveDirectory","Microsoft","Microsoft Entra ID","Gain insights into Microsoft Entra ID by connecting Audit and Sign-in logs to Microsoft 
Sentinel to gather insights around Microsoft Entra ID scenarios. You can learn about app usage, conditional access policies, legacy auth relate details using our Sign-in logs. You can get information on your Self Service Password Reset (SSPR) usage, Microsoft Entra ID Management activities like user, group, role, app management using our Audit logs table. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/?linkid=2219715&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect Microsoft Entra ID logs to Microsoft Sentinel"", ""description"": ""Select Microsoft Entra ID log types:"", ""instructions"": [{""parameters"": {""connectorKind"": ""AzureActiveDirectory"", ""dataTypes"": [{""title"": ""Sign-In Logs"", ""name"": ""SignInLogs"", ""infoBoxHtmlTemplate"": ""In order to export Sign-in data, your organization needs Microsoft Entra ID P1 or P2 license. If you don't have a P1 or P2, start a free trial.""}, {""title"": ""Audit Logs"", ""name"": ""AuditLogs""}, {""title"": ""Non-Interactive User Sign-In Log"", ""name"": ""NonInteractiveUserSignInLogs""}, {""title"": ""Service Principal Sign-In Logs"", ""name"": ""ServicePrincipalSignInLogs""}, {""title"": ""Managed Identity Sign-In Logs"", ""name"": ""ManagedIdentitySignInLogs""}, {""title"": ""Provisioning Logs"", ""name"": ""ProvisioningLogs""}, {""title"": ""ADFS Sign-In Logs"", ""name"": ""ADFSSignInLogs""}, {""title"": ""User Risk Events"", ""name"": ""UserRiskEvents""}, {""title"": ""Risky Users"", ""name"": ""RiskyUsers""}, {""title"": ""Network Access Traffic Logs"", ""name"": ""NetworkAccessTrafficLogs""}, {""title"": ""Risky Service Principals"", ""name"": ""RiskyServicePrincipals""}, {""title"": ""Service Principal Risk Events"", ""name"": ""ServicePrincipalRiskEvents""}]}, ""type"": ""AADDataTypes""}]}]","{""tenant"": [""GlobalAdmin"", ""SecurityAdmin""], ""resourceProvider"": [{""provider"": 
""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""microsoft.aadiam/diagnosticSettings"", ""providerDisplayName"": ""Diagnostic Settings"", ""permissionsDisplayText"": ""read and write permissions to AAD diagnostic settings."", ""requiredPermissions"": {""read"": true, ""write"": true}, ""scope"": ""/providers/microsoft.aadiam""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID/Data%20Connectors/template_AzureActiveDirectory.JSON","true" -"AADRiskyUsers","Microsoft Entra ID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID","azuresentinel","azure-sentinel-solution-azureactivedirectory","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureActiveDirectory","Microsoft","Microsoft Entra ID","Gain insights into Microsoft Entra ID by connecting Audit and Sign-in logs to Microsoft Sentinel to gather insights around Microsoft Entra ID scenarios. You can learn about app usage, conditional access policies, legacy auth relate details using our Sign-in logs. You can get information on your Self Service Password Reset (SSPR) usage, Microsoft Entra ID Management activities like user, group, role, app management using our Audit logs table. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/?linkid=2219715&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect Microsoft Entra ID logs to Microsoft Sentinel"", ""description"": ""Select Microsoft Entra ID log types:"", ""instructions"": [{""parameters"": {""connectorKind"": ""AzureActiveDirectory"", ""dataTypes"": [{""title"": ""Sign-In Logs"", ""name"": ""SignInLogs"", ""infoBoxHtmlTemplate"": ""In order to export Sign-in data, your organization needs Microsoft Entra ID P1 or P2 license. If you don't have a P1 or P2, start a free trial.""}, {""title"": ""Audit Logs"", ""name"": ""AuditLogs""}, {""title"": ""Non-Interactive User Sign-In Log"", ""name"": ""NonInteractiveUserSignInLogs""}, {""title"": ""Service Principal Sign-In Logs"", ""name"": ""ServicePrincipalSignInLogs""}, {""title"": ""Managed Identity Sign-In Logs"", ""name"": ""ManagedIdentitySignInLogs""}, {""title"": ""Provisioning Logs"", ""name"": ""ProvisioningLogs""}, {""title"": ""ADFS Sign-In Logs"", ""name"": ""ADFSSignInLogs""}, {""title"": ""User Risk Events"", ""name"": ""UserRiskEvents""}, {""title"": ""Risky Users"", ""name"": ""RiskyUsers""}, {""title"": ""Network Access Traffic Logs"", ""name"": ""NetworkAccessTrafficLogs""}, {""title"": ""Risky Service Principals"", ""name"": ""RiskyServicePrincipals""}, {""title"": ""Service Principal Risk Events"", ""name"": ""ServicePrincipalRiskEvents""}]}, ""type"": ""AADDataTypes""}]}]","{""tenant"": [""GlobalAdmin"", ""SecurityAdmin""], ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""microsoft.aadiam/diagnosticSettings"", ""providerDisplayName"": ""Diagnostic Settings"", ""permissionsDisplayText"": ""read and write 
permissions to AAD diagnostic settings."", ""requiredPermissions"": {""read"": true, ""write"": true}, ""scope"": ""/providers/microsoft.aadiam""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID/Data%20Connectors/template_AzureActiveDirectory.JSON","true" -"NetworkAccessTraffic","Microsoft Entra ID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID","azuresentinel","azure-sentinel-solution-azureactivedirectory","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureActiveDirectory","Microsoft","Microsoft Entra ID","Gain insights into Microsoft Entra ID by connecting Audit and Sign-in logs to Microsoft Sentinel to gather insights around Microsoft Entra ID scenarios. You can learn about app usage, conditional access policies, legacy auth relate details using our Sign-in logs. You can get information on your Self Service Password Reset (SSPR) usage, Microsoft Entra ID Management activities like user, group, role, app management using our Audit logs table. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/?linkid=2219715&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect Microsoft Entra ID logs to Microsoft Sentinel"", ""description"": ""Select Microsoft Entra ID log types:"", ""instructions"": [{""parameters"": {""connectorKind"": ""AzureActiveDirectory"", ""dataTypes"": [{""title"": ""Sign-In Logs"", ""name"": ""SignInLogs"", ""infoBoxHtmlTemplate"": ""In order to export Sign-in data, your organization needs Microsoft Entra ID P1 or P2 license. 
If you don't have a P1 or P2, start a free trial.""}, {""title"": ""Audit Logs"", ""name"": ""AuditLogs""}, {""title"": ""Non-Interactive User Sign-In Log"", ""name"": ""NonInteractiveUserSignInLogs""}, {""title"": ""Service Principal Sign-In Logs"", ""name"": ""ServicePrincipalSignInLogs""}, {""title"": ""Managed Identity Sign-In Logs"", ""name"": ""ManagedIdentitySignInLogs""}, {""title"": ""Provisioning Logs"", ""name"": ""ProvisioningLogs""}, {""title"": ""ADFS Sign-In Logs"", ""name"": ""ADFSSignInLogs""}, {""title"": ""User Risk Events"", ""name"": ""UserRiskEvents""}, {""title"": ""Risky Users"", ""name"": ""RiskyUsers""}, {""title"": ""Network Access Traffic Logs"", ""name"": ""NetworkAccessTrafficLogs""}, {""title"": ""Risky Service Principals"", ""name"": ""RiskyServicePrincipals""}, {""title"": ""Service Principal Risk Events"", ""name"": ""ServicePrincipalRiskEvents""}]}, ""type"": ""AADDataTypes""}]}]","{""tenant"": [""GlobalAdmin"", ""SecurityAdmin""], ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""microsoft.aadiam/diagnosticSettings"", ""providerDisplayName"": ""Diagnostic Settings"", ""permissionsDisplayText"": ""read and write permissions to AAD diagnostic settings."", ""requiredPermissions"": {""read"": true, ""write"": true}, ""scope"": ""/providers/microsoft.aadiam""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID/Data%20Connectors/template_AzureActiveDirectory.JSON","true" -"RiskyServicePrincipals","Microsoft Entra ID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID","azuresentinel","azure-sentinel-solution-azureactivedirectory","2022-05-16","","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureActiveDirectory","Microsoft","Microsoft Entra ID","Gain insights into Microsoft Entra ID by connecting Audit and Sign-in logs to Microsoft Sentinel to gather insights around Microsoft Entra ID scenarios. You can learn about app usage, conditional access policies, legacy auth relate details using our Sign-in logs. You can get information on your Self Service Password Reset (SSPR) usage, Microsoft Entra ID Management activities like user, group, role, app management using our Audit logs table. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/?linkid=2219715&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect Microsoft Entra ID logs to Microsoft Sentinel"", ""description"": ""Select Microsoft Entra ID log types:"", ""instructions"": [{""parameters"": {""connectorKind"": ""AzureActiveDirectory"", ""dataTypes"": [{""title"": ""Sign-In Logs"", ""name"": ""SignInLogs"", ""infoBoxHtmlTemplate"": ""In order to export Sign-in data, your organization needs Microsoft Entra ID P1 or P2 license. 
If you don't have a P1 or P2, start a free trial.""}, {""title"": ""Audit Logs"", ""name"": ""AuditLogs""}, {""title"": ""Non-Interactive User Sign-In Log"", ""name"": ""NonInteractiveUserSignInLogs""}, {""title"": ""Service Principal Sign-In Logs"", ""name"": ""ServicePrincipalSignInLogs""}, {""title"": ""Managed Identity Sign-In Logs"", ""name"": ""ManagedIdentitySignInLogs""}, {""title"": ""Provisioning Logs"", ""name"": ""ProvisioningLogs""}, {""title"": ""ADFS Sign-In Logs"", ""name"": ""ADFSSignInLogs""}, {""title"": ""User Risk Events"", ""name"": ""UserRiskEvents""}, {""title"": ""Risky Users"", ""name"": ""RiskyUsers""}, {""title"": ""Network Access Traffic Logs"", ""name"": ""NetworkAccessTrafficLogs""}, {""title"": ""Risky Service Principals"", ""name"": ""RiskyServicePrincipals""}, {""title"": ""Service Principal Risk Events"", ""name"": ""ServicePrincipalRiskEvents""}]}, ""type"": ""AADDataTypes""}]}]","{""tenant"": [""GlobalAdmin"", ""SecurityAdmin""], ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""microsoft.aadiam/diagnosticSettings"", ""providerDisplayName"": ""Diagnostic Settings"", ""permissionsDisplayText"": ""read and write permissions to AAD diagnostic settings."", ""requiredPermissions"": {""read"": true, ""write"": true}, ""scope"": ""/providers/microsoft.aadiam""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID/Data%20Connectors/template_AzureActiveDirectory.JSON","true" -"RiskyUsers","Microsoft Entra ID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID","azuresentinel","azure-sentinel-solution-azureactivedirectory","2022-05-16","","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureActiveDirectory","Microsoft","Microsoft Entra ID","Gain insights into Microsoft Entra ID by connecting Audit and Sign-in logs to Microsoft Sentinel to gather insights around Microsoft Entra ID scenarios. You can learn about app usage, conditional access policies, legacy auth relate details using our Sign-in logs. You can get information on your Self Service Password Reset (SSPR) usage, Microsoft Entra ID Management activities like user, group, role, app management using our Audit logs table. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/?linkid=2219715&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect Microsoft Entra ID logs to Microsoft Sentinel"", ""description"": ""Select Microsoft Entra ID log types:"", ""instructions"": [{""parameters"": {""connectorKind"": ""AzureActiveDirectory"", ""dataTypes"": [{""title"": ""Sign-In Logs"", ""name"": ""SignInLogs"", ""infoBoxHtmlTemplate"": ""In order to export Sign-in data, your organization needs Microsoft Entra ID P1 or P2 license. 
If you don't have a P1 or P2, start a free trial.""}, {""title"": ""Audit Logs"", ""name"": ""AuditLogs""}, {""title"": ""Non-Interactive User Sign-In Log"", ""name"": ""NonInteractiveUserSignInLogs""}, {""title"": ""Service Principal Sign-In Logs"", ""name"": ""ServicePrincipalSignInLogs""}, {""title"": ""Managed Identity Sign-In Logs"", ""name"": ""ManagedIdentitySignInLogs""}, {""title"": ""Provisioning Logs"", ""name"": ""ProvisioningLogs""}, {""title"": ""ADFS Sign-In Logs"", ""name"": ""ADFSSignInLogs""}, {""title"": ""User Risk Events"", ""name"": ""UserRiskEvents""}, {""title"": ""Risky Users"", ""name"": ""RiskyUsers""}, {""title"": ""Network Access Traffic Logs"", ""name"": ""NetworkAccessTrafficLogs""}, {""title"": ""Risky Service Principals"", ""name"": ""RiskyServicePrincipals""}, {""title"": ""Service Principal Risk Events"", ""name"": ""ServicePrincipalRiskEvents""}]}, ""type"": ""AADDataTypes""}]}]","{""tenant"": [""GlobalAdmin"", ""SecurityAdmin""], ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""microsoft.aadiam/diagnosticSettings"", ""providerDisplayName"": ""Diagnostic Settings"", ""permissionsDisplayText"": ""read and write permissions to AAD diagnostic settings."", ""requiredPermissions"": {""read"": true, ""write"": true}, ""scope"": ""/providers/microsoft.aadiam""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID/Data%20Connectors/template_AzureActiveDirectory.JSON","true" -"","Microsoft Entra ID Assets","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID%20Assets","azuresentinel","azure-sentinel-solution-entraidassets","2025-06-20","","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","Microsoft Entra ID Assets","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID%20Assets","azuresentinel","azure-sentinel-solution-entraidassets","2025-06-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","EntraIDAssets","Microsoft","Microsoft Entra ID Assets","Entra ID assets data connector gives richer insights into activity data by supplementing details with asset information. Data from this connector is used to build data risk graphs in Purview. If you have enabled those graphs, deactivating this Connector will prevent the graphs from being built. [Learn about the data risk graph.](https://go.microsoft.com/fwlink/?linkid=2320023)","[{""instructions"": [{""parameters"": {""type"": ""EntraIDAssets"", ""description"": ""1. Connect Microsoft Entra ID assets to ingest into Microsoft Sentinel Lake."", ""items"": [{""label"": ""Applications""}, {""label"": ""Group Memberships""}, {""label"": ""Groups""}, {""label"": ""Members""}, {""label"": ""Organizations""}, {""label"": ""Service Principals""}, {""label"": ""Users""}]}, ""type"": ""MSG""}]}]","{""tenant"": [""GlobalAdmin"", ""SecurityAdmin""]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID%20Assets/Data%20Connectors/EntraIDAssets_DataConnectorDefinition.json","true" -"","Microsoft Entra ID Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID%20Protection","azuresentinel","azure-sentinel-solution-azureactivedirectoryip","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"SecurityAlert","Microsoft Entra ID 
Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID%20Protection","azuresentinel","azure-sentinel-solution-azureactivedirectoryip","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureActiveDirectoryIdentityProtection","Microsoft","Microsoft Entra ID Protection","Microsoft Entra ID Protection provides a consolidated view at risk users, risk events and vulnerabilities, with the ability to remediate risk immediately, and set policies to auto-remediate future events. The service is built on Microsoft’s experience protecting consumer identities and gains tremendous accuracy from the signal from over 13 billion logins a day. Integrate Microsoft Entra ID Protection alerts with Microsoft Sentinel to view dashboards, create custom alerts, and improve investigation. For more information, see the [Microsoft Sentinel documentation ](https://go.microsoft.com/fwlink/p/?linkid=2220065&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).

[Get Microsoft Entra ID Premium P1/P2 ](https://aka.ms/asi-ipcconnectorgetlink)","[{""title"": ""Microsoft Entra ID Protection alerts to Microsoft Sentinel"", ""description"": ""Connect Microsoft Entra ID Protection to Microsoft Sentinel. \n \n> The alerts are sent to this Microsoft Sentinel workspace."", ""instructions"": [{""parameters"": {""connectorKind"": ""AzureActiveDirectory"", ""title"": ""Microsoft Entra ID Protection"", ""enable"": true}, ""type"": ""SentinelResourceProvider""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""tenant"": [""GlobalAdmin"", ""SecurityAdmin""], ""licenses"": [""AadP1P2""]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID%20Protection/Data%20Connectors/template_AzureActiveDirectoryIdentityProtection.JSON","true" -"","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","","","","","","","false","","false" -"Event","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-ExchangeAdminAuditLogEvents","Microsoft","[Deprecated] Microsoft Exchange Logs and Events","Deprecated, use the 'ESI-Opt' dataconnectors. 
You can stream all Exchange Audit events, IIS Logs, HTTP Proxy logs and Security Event logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to view dashboards, create custom alerts, and improve investigation. This is used by Microsoft Exchange Security Workbooks to provide security insights of your On-Premises Exchange environment","[{""description"": "">**NOTE:** This solution is based on options. This allows you to choose which data will be ingest as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each options are independant for one from the other. To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions)""}, {""title"": ""1. Download and install the agents needed to collect logs for Microsoft Sentinel"", ""description"": ""Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Deploy Monitor Agents"", ""description"": ""This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers"", ""instructions"": [{""parameters"": {""title"": ""Select which agent you want to install in your servers to collect logs:"", ""instructionSteps"": [{""title"": ""[Prefered] Azure Monitor Agent via Azure Arc"", ""description"": ""**Deploy the Azure Arc Agent**\n> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""title"": ""Install Azure Log Analytics Agent (Deprecated on 31/08/2024)"", ""description"": ""1. 
Download the Azure Log Analytics Agent and choose the deployment method in the below link."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Deploy log ingestion following chosen options"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""[Option 1] MS Exchange Management Log collection"", ""description"": ""Select how to stream MS Exchange Admin Audit event logs"", ""instructions"": [{""parameters"": {""title"": ""MS Exchange Admin Audit event logs"", ""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Microsoft Exchange Admin Audit Events logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCR.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption1-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCR, Type Event log"", ""description"": ""1. 
From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Custom' option, enter 'MSExchange Management' as expression and Add it.\n6. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace **Legacy agents management**, select **Windows Event logs**.\n2. Click **Add Windows event log** and enter **MSExchange Management** as log name.\n3. Collect Error, Warning and Information types\n4. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 2] Security/Application/System logs of Exchange Servers"", ""description"": ""Select how to stream Security/Application/System logs of Exchange Servers"", ""instructions"": [{""parameters"": {""title"": ""Security Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - Security Event logs"", ""description"": ""**Enable data collection rule for Security Logs**\nSecurity Events logs are collected only from **Windows** agents.\n1. 
Add Exchange Servers on *Resources* tab.\n2. Select Security log level\n\n> **Common level** is the minimum required. Please select 'Common' or 'All Security Events' on DCR definition."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 0}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""title"": ""Application and System Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Application and System Events logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCR.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCR, Type Event log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. \n4. 
In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Basic' option.\n6. For Application, select 'Critical', 'Error' and 'Warning'. For System, select Critical/Error/Warning/Information. \n7. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Windows Event logs**.\n2. Click **Add Windows event log** and search **Application** as log name.\n3. Click **Add Windows event log** and search **System** as log name.\n4. Collect Error (for all), Warning (for all) and Information (for System) types\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 3 and 4] Security logs of Domain Controllers"", ""description"": ""Select how to stream Security logs of Domain Controllers. If you want to implement Option 3, you just need to select DC on same site as Exchange Servers. 
If you want to implement Option 4, you can select all DCs of your forest."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""[Option 3] List only Domain Controllers on the same site as Exchange Servers for next step"", ""description"": ""**This limits the quantity of data injested but some incident can't be detected.**""}, {""title"": ""[Option 4] List all Domain Controllers of your Active-Directory Forest for next step"", ""description"": ""**This allows collecting all security events**""}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""title"": ""Security Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - Security Event logs"", ""description"": ""**Enable data collection rule for Security Logs**\nSecurity Events logs are collected only from **Windows** agents.\n1. Add chosen DCs on *Resources* tab.\n2. Select Security log level\n\n> **Common level** is the minimum required. Please select 'Common' or 'All Security Events' on DCR definition."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 0}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 5] IIS logs of Exchange Servers"", ""description"": ""Select how to stream IIS logs of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> IIS logs are collected only from **Windows** agents."", ""instructions"": [{""type"": ""AdminAuditEvents""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. 
Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption5-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create DCR, Type IIS log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. 
In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. Select the created DCE. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'IIS logs' (Do not enter a path if IIS Logs path is configured by default). Click on 'Add data source'\n6. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **IIS Logs**.\n2. Check **Collect W3C format IIS log files**\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 6] Message Tracking of Exchange Servers"", ""description"": ""Select how to stream Message Tracking of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Message Tracking are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""text"": ""**Attention**, Custom logs in Monitor Agent is in Preview. 
The deployment doesn't work as expected for the moment (March 2023)."", ""inline"": false}, ""type"": ""InfoMessage""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule and Custom Table"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption6-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. 
In the **Basics** tab, fill the required fields and give a name to the DCE, like ESI-ExchangeServers. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create Custom DCR Table"", ""description"": ""1. Download the Example file from [Microsoft Sentinel GitHub](https://aka.ms/Sentinel-Sample-ESI-MessageTrackingExampleFile).\n2. From the Azure Portal, navigate to [Workspace Analytics](https://portal.azure.com/#view/HubsExtension/BrowseResource/resourceType/Microsoft.OperationalInsights%2Fworkspaces) and select your target Workspace.\n3. Click in 'Tables', click **+ Create** at the top and select **New Custom log (DCR-Based)**.\n4. In the **Basics** tab, enter **MessageTrackingLog** on the Table name, create a Data Collection rule with the name **DCR-Option6-MessageTrackingLogs** (for example) and select the previously created Data collection Endpoint.\n5. In the **Schema and Transformation** tab, choose the downloaded sample file and click on **Transformation Editor**.\n6. 
In the transformation field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(['date-time'])\n| extend\n clientHostname = ['client-hostname'],\n clientIP = ['client-ip'],\n connectorId = ['connector-id'],\n customData = ['custom-data'],\n eventId = ['event-id'],\n internalMessageId = ['internal-message-id'],\n logId = ['log-id'],\n messageId = ['message-id'],\n messageInfo = ['message-info'],\n messageSubject = ['message-subject'],\n networkMessageId = ['network-message-id'],\n originalClientIp = ['original-client-ip'],\n originalServerIp = ['original-server-ip'],\n recipientAddress= ['recipient-address'],\n recipientCount= ['recipient-count'],\n recipientStatus= ['recipient-status'],\n relatedRecipientAddress= ['related-recipient-address'],\n returnPath= ['return-path'],\n senderAddress= ['sender-address'],\n senderHostname= ['server-hostname'],\n serverIp= ['server-ip'],\n sourceContext= ['source-context'],\n schemaVersion=['schema-version'],\n messageTrackingTenantId = ['tenant-id'],\n totalBytes = ['total-bytes'],\n transportTrafficType = ['transport-traffic-type']\n| project-away\n ['client-ip'],\n ['client-hostname'],\n ['connector-id'],\n ['custom-data'],\n ['date-time'],\n ['event-id'],\n ['internal-message-id'],\n ['log-id'],\n ['message-id'],\n ['message-info'],\n ['message-subject'],\n ['network-message-id'],\n ['original-client-ip'],\n ['original-server-ip'],\n ['recipient-address'],\n ['recipient-count'],\n ['recipient-status'],\n ['related-recipient-address'],\n ['return-path'],\n ['sender-address'],\n ['server-hostname'],\n ['server-ip'],\n ['source-context'],\n ['schema-version'],\n ['tenant-id'],\n ['total-bytes'],\n ['transport-traffic-type']*\n\n8. Click 'Run' and after 'Apply'.\n9. Click **Next**, then click **Create**.""}, {""title"": ""C. Modify the created DCR, Type Custom log"", ""description"": ""1. 
From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Select the previously created DCR, like **DCR-Option6-MessageTrackingLogs**.\n3. In the **Resources** tab, enter you Exchange Servers.\n4. In **Data Sources**, add a Data Source type 'Custom Text logs' and enter 'C:\\Program Files\\Microsoft\\Exchange Server\\V15\\TransportRoles\\Logs\\MessageTracking\\*.log' in file pattern, 'MessageTrackingLog_CL' in Table Name.\n6.in Transform field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(['date-time'])\n| extend\n clientHostname = ['client-hostname'],\n clientIP = ['client-ip'],\n connectorId = ['connector-id'],\n customData = ['custom-data'],\n eventId = ['event-id'],\n internalMessageId = ['internal-message-id'],\n logId = ['log-id'],\n messageId = ['message-id'],\n messageInfo = ['message-info'],\n messageSubject = ['message-subject'],\n networkMessageId = ['network-message-id'],\n originalClientIp = ['original-client-ip'],\n originalServerIp = ['original-server-ip'],\n recipientAddress= ['recipient-address'],\n recipientCount= ['recipient-count'],\n recipientStatus= ['recipient-status'],\n relatedRecipientAddress= ['related-recipient-address'],\n returnPath= ['return-path'],\n senderAddress= ['sender-address'],\n senderHostname= ['server-hostname'],\n serverIp= ['server-ip'],\n sourceContext= ['source-context'],\n schemaVersion=['schema-version'],\n messageTrackingTenantId = ['tenant-id'],\n totalBytes = ['total-bytes'],\n transportTrafficType = ['transport-traffic-type']\n| project-away\n ['client-ip'],\n ['client-hostname'],\n ['connector-id'],\n ['custom-data'],\n ['date-time'],\n ['event-id'],\n ['internal-message-id'],\n ['log-id'],\n ['message-id'],\n ['message-info'],\n ['message-subject'],\n ['network-message-id'],\n ['original-client-ip'],\n ['original-server-ip'],\n ['recipient-address'],\n 
['recipient-count'],\n ['recipient-status'],\n ['related-recipient-address'],\n ['return-path'],\n ['sender-address'],\n ['server-hostname'],\n ['server-ip'],\n ['source-context'],\n ['schema-version'],\n ['tenant-id'],\n ['total-bytes'],\n ['transport-traffic-type']* \n7. Click on 'Add data source'.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\n1. Under workspace **Settings** part, select **Tables**, click **+ Create** and click on **New custom log (MMA-Based)**.\n2. Select Sample file **[MessageTracking Sample](https://aka.ms/Sentinel-Sample-ESI-MessageTrackingLogsSampleCSV)** and click Next\n3. Select type **Windows** and enter the path **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\TransportRoles\\Logs\\MessageTracking\\*.log**. Click Next.\n4. Enter **MessageTrackingLog** as Table name and click Next.\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 7] HTTP Proxy of Exchange Servers"", ""description"": ""Select how to stream HTTP Proxy of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Message Tracking are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""text"": ""**Attention**, Custom logs in Monitor Agent is in Preview. 
The deployment doesn't work as expected for the moment (March 2023)."", ""inline"": false}, ""type"": ""InfoMessage""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption7-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE. \n3. 
'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create Custom DCR Table"", ""description"": ""1. Download the Example file from [Microsoft Sentinel GitHub](https://aka.ms/Sentinel-Sample-ESI-HTTPProxyExampleFile).\n2. From the Azure Portal, navigate to [Workspace Analytics](https://portal.azure.com/#view/HubsExtension/BrowseResource/resourceType/Microsoft.OperationalInsights%2Fworkspaces) and select your target Workspace.\n3. Click in 'Tables', click **+ Create** at the top and select **New Custom log (DCR-Based)**.\n4. In the **Basics** tab, enter **ExchangeHttpProxy** on the Table name, create a Data Collection rule with the name **DCR-Option7-HTTPProxyLogs** (for example) and select the previously created Data collection Endpoint.\n5. In the **Schema and Transformation** tab, choose the downloaded sample file and click on **Transformation Editor**.\n6. In the transformation field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(DateTime)\n| project-away DateTime\n*\n\n8. Click 'Run' and after 'Apply'.\n9. Click **Next**, then click **Create**.""}, {""title"": ""C. Modify the created DCR, Type Custom log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Select the previously created DCR, like **DCR-Option7-HTTPProxyLogs**.\n3. In the **Resources** tab, enter you Exchange Servers.\n4. In **Data Sources**, add a Data Source type 'Custom Text logs' and enter 'C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Autodiscover\\*.log' in file pattern, 'ExchangeHttpProxy_CL' in Table Name.\n6.in Transform field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(DateTime)\n| project-away DateTime* \n7. 
Click on 'Add data source'.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\n1. Under workspace **Settings** part, select **Tables**, click **+ Create** and click on **New custom log (MMA-Based)**.\n2. Select Sample file **[MessageTracking Sample](https://aka.ms/Sentinel-Sample-ESI-HttpProxySampleCSV)** and click Next\n3. Select type **Windows** and enter all the following paths **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Autodiscover\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Eas\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Ecp\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Ews\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Mapi\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Oab\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Owa\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\OwaCalendar\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\PowerShell\\*.log** and **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\RpcHttp\\*.log** . Click Next.\n4. Enter **ExchangeHttpProxy** as Table name and click Next.\n5. 
Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. Parsers are automatically deployed with the solution. Follow the steps to create the Kusto Functions alias : [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)"", ""instructions"": [{""parameters"": {""title"": ""Parsers are automatically deployed during Solution deployment. If you want to deploy manually, follow the steps below"", ""instructionSteps"": [{""title"": ""Manual Parser Deployment"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""1. Download the Parser file"", ""description"": ""The latest version of the file [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)""}, {""title"": ""2. Create Parser **ExchangeAdminAuditLogs** function"", ""description"": ""In 'Logs' explorer of your Microsoft Sentinel's log analytics, copy the content of the file to Log explorer""}, {""title"": ""3. Save Parser **ExchangeAdminAuditLogs** function"", ""description"": ""Click on save button.\n No parameter is needed for this parser.\nClick save again.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""Azure Log Analytics will be deprecated, to collect data from non-Azure VMs, Azure Arc is recommended. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""name"": ""Detailed documentation"", ""description"": "">**NOTE:** Detailed documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-ExchangeAdminAuditLogEvents.json","true" -"ExchangeHttpProxy_CL","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-ExchangeAdminAuditLogEvents","Microsoft","[Deprecated] Microsoft Exchange Logs and Events","Deprecated, use the 'ESI-Opt' dataconnectors. You can stream all Exchange Audit events, IIS Logs, HTTP Proxy logs and Security Event logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to view dashboards, create custom alerts, and improve investigation. 
This is used by Microsoft Exchange Security Workbooks to provide security insights of your On-Premises Exchange environment","[{""description"": "">**NOTE:** This solution is based on options. This allows you to choose which data will be ingested as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each option is independent from the others. To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions)""}, {""title"": ""1. Download and install the agents needed to collect logs for Microsoft Sentinel"", ""description"": ""Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Deploy Monitor Agents"", ""description"": ""This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers"", ""instructions"": [{""parameters"": {""title"": ""Select which agent you want to install in your servers to collect logs:"", ""instructionSteps"": [{""title"": ""[Preferred] Azure Monitor Agent via Azure Arc"", ""description"": ""**Deploy the Azure Arc Agent**\n> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""title"": ""Install Azure Log Analytics Agent (Deprecated on 31/08/2024)"", ""description"": ""1. Download the Azure Log Analytics Agent and choose the deployment method in the below link."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Deploy log ingestion following chosen options"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""[Option 1] MS Exchange Management Log collection"", ""description"": ""Select how to stream MS Exchange Admin Audit event logs"", ""instructions"": [{""parameters"": {""title"": ""MS Exchange Admin Audit event logs"", ""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Microsoft Exchange Admin Audit Events logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCR.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption1-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCR, Type Event log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. \n4. In the **Resources** tab, enter your Exchange Servers.\n5. 
In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Custom' option, enter 'MSExchange Management' as expression and Add it.\n6. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace **Legacy agents management**, select **Windows Event logs**.\n2. Click **Add Windows event log** and enter **MSExchange Management** as log name.\n3. Collect Error, Warning and Information types\n4. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 2] Security/Application/System logs of Exchange Servers"", ""description"": ""Select how to stream Security/Application/System logs of Exchange Servers"", ""instructions"": [{""parameters"": {""title"": ""Security Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - Security Event logs"", ""description"": ""**Enable data collection rule for Security Logs**\nSecurity Events logs are collected only from **Windows** agents.\n1. Add Exchange Servers on *Resources* tab.\n2. Select Security log level\n\n> **Common level** is the minimum required. 
Please select 'Common' or 'All Security Events' on DCR definition."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 0}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""title"": ""Application and System Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Application and System Events logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCR.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCR, Type Event log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Basic' option.\n6. 
For Application, select 'Critical', 'Error' and 'Warning'. For System, select Critical/Error/Warning/Information. \n7. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Windows Event logs**.\n2. Click **Add Windows event log** and search **Application** as log name.\n3. Click **Add Windows event log** and search **System** as log name.\n4. Collect Error (for all), Warning (for all) and Information (for System) types\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 3 and 4] Security logs of Domain Controllers"", ""description"": ""Select how to stream Security logs of Domain Controllers. If you want to implement Option 3, you just need to select DC on same site as Exchange Servers. 
If you want to implement Option 4, you can select all DCs of your forest."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""[Option 3] List only Domain Controllers on the same site as Exchange Servers for next step"", ""description"": ""**This limits the quantity of data injested but some incident can't be detected.**""}, {""title"": ""[Option 4] List all Domain Controllers of your Active-Directory Forest for next step"", ""description"": ""**This allows collecting all security events**""}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""title"": ""Security Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - Security Event logs"", ""description"": ""**Enable data collection rule for Security Logs**\nSecurity Events logs are collected only from **Windows** agents.\n1. Add chosen DCs on *Resources* tab.\n2. Select Security log level\n\n> **Common level** is the minimum required. Please select 'Common' or 'All Security Events' on DCR definition."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 0}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 5] IIS logs of Exchange Servers"", ""description"": ""Select how to stream IIS logs of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> IIS logs are collected only from **Windows** agents."", ""instructions"": [{""type"": ""AdminAuditEvents""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. 
Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption5-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create DCR, Type IIS log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. 
In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. Select the created DCE. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'IIS logs' (Do not enter a path if IIS Logs path is configured by default). Click on 'Add data source'\n6. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **IIS Logs**.\n2. Check **Collect W3C format IIS log files**\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 6] Message Tracking of Exchange Servers"", ""description"": ""Select how to stream Message Tracking of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Message Tracking are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""text"": ""**Attention**, Custom logs in Monitor Agent is in Preview. 
The deployment doesn't work as expected for the moment (March 2023)."", ""inline"": false}, ""type"": ""InfoMessage""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule and Custom Table"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption6-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. 
In the **Basics** tab, fill the required fields and give a name to the DCE, like ESI-ExchangeServers. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create Custom DCR Table"", ""description"": ""1. Download the Example file from [Microsoft Sentinel GitHub](https://aka.ms/Sentinel-Sample-ESI-MessageTrackingExampleFile).\n2. From the Azure Portal, navigate to [Workspace Analytics](https://portal.azure.com/#view/HubsExtension/BrowseResource/resourceType/Microsoft.OperationalInsights%2Fworkspaces) and select your target Workspace.\n3. Click in 'Tables', click **+ Create** at the top and select **New Custom log (DCR-Based)**.\n4. In the **Basics** tab, enter **MessageTrackingLog** on the Table name, create a Data Collection rule with the name **DCR-Option6-MessageTrackingLogs** (for example) and select the previously created Data collection Endpoint.\n5. In the **Schema and Transformation** tab, choose the downloaded sample file and click on **Transformation Editor**.\n6. 
In the transformation field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(['date-time'])\n| extend\n clientHostname = ['client-hostname'],\n clientIP = ['client-ip'],\n connectorId = ['connector-id'],\n customData = ['custom-data'],\n eventId = ['event-id'],\n internalMessageId = ['internal-message-id'],\n logId = ['log-id'],\n messageId = ['message-id'],\n messageInfo = ['message-info'],\n messageSubject = ['message-subject'],\n networkMessageId = ['network-message-id'],\n originalClientIp = ['original-client-ip'],\n originalServerIp = ['original-server-ip'],\n recipientAddress= ['recipient-address'],\n recipientCount= ['recipient-count'],\n recipientStatus= ['recipient-status'],\n relatedRecipientAddress= ['related-recipient-address'],\n returnPath= ['return-path'],\n senderAddress= ['sender-address'],\n senderHostname= ['server-hostname'],\n serverIp= ['server-ip'],\n sourceContext= ['source-context'],\n schemaVersion=['schema-version'],\n messageTrackingTenantId = ['tenant-id'],\n totalBytes = ['total-bytes'],\n transportTrafficType = ['transport-traffic-type']\n| project-away\n ['client-ip'],\n ['client-hostname'],\n ['connector-id'],\n ['custom-data'],\n ['date-time'],\n ['event-id'],\n ['internal-message-id'],\n ['log-id'],\n ['message-id'],\n ['message-info'],\n ['message-subject'],\n ['network-message-id'],\n ['original-client-ip'],\n ['original-server-ip'],\n ['recipient-address'],\n ['recipient-count'],\n ['recipient-status'],\n ['related-recipient-address'],\n ['return-path'],\n ['sender-address'],\n ['server-hostname'],\n ['server-ip'],\n ['source-context'],\n ['schema-version'],\n ['tenant-id'],\n ['total-bytes'],\n ['transport-traffic-type']*\n\n8. Click 'Run' and after 'Apply'.\n9. Click **Next**, then click **Create**.""}, {""title"": ""C. Modify the created DCR, Type Custom log"", ""description"": ""1. 
From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Select the previously created DCR, like **DCR-Option6-MessageTrackingLogs**.\n3. In the **Resources** tab, enter you Exchange Servers.\n4. In **Data Sources**, add a Data Source type 'Custom Text logs' and enter 'C:\\Program Files\\Microsoft\\Exchange Server\\V15\\TransportRoles\\Logs\\MessageTracking\\*.log' in file pattern, 'MessageTrackingLog_CL' in Table Name.\n6.in Transform field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(['date-time'])\n| extend\n clientHostname = ['client-hostname'],\n clientIP = ['client-ip'],\n connectorId = ['connector-id'],\n customData = ['custom-data'],\n eventId = ['event-id'],\n internalMessageId = ['internal-message-id'],\n logId = ['log-id'],\n messageId = ['message-id'],\n messageInfo = ['message-info'],\n messageSubject = ['message-subject'],\n networkMessageId = ['network-message-id'],\n originalClientIp = ['original-client-ip'],\n originalServerIp = ['original-server-ip'],\n recipientAddress= ['recipient-address'],\n recipientCount= ['recipient-count'],\n recipientStatus= ['recipient-status'],\n relatedRecipientAddress= ['related-recipient-address'],\n returnPath= ['return-path'],\n senderAddress= ['sender-address'],\n senderHostname= ['server-hostname'],\n serverIp= ['server-ip'],\n sourceContext= ['source-context'],\n schemaVersion=['schema-version'],\n messageTrackingTenantId = ['tenant-id'],\n totalBytes = ['total-bytes'],\n transportTrafficType = ['transport-traffic-type']\n| project-away\n ['client-ip'],\n ['client-hostname'],\n ['connector-id'],\n ['custom-data'],\n ['date-time'],\n ['event-id'],\n ['internal-message-id'],\n ['log-id'],\n ['message-id'],\n ['message-info'],\n ['message-subject'],\n ['network-message-id'],\n ['original-client-ip'],\n ['original-server-ip'],\n ['recipient-address'],\n 
['recipient-count'],\n ['recipient-status'],\n ['related-recipient-address'],\n ['return-path'],\n ['sender-address'],\n ['server-hostname'],\n ['server-ip'],\n ['source-context'],\n ['schema-version'],\n ['tenant-id'],\n ['total-bytes'],\n ['transport-traffic-type']* \n7. Click on 'Add data source'.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\n1. Under workspace **Settings** part, select **Tables**, click **+ Create** and click on **New custom log (MMA-Based)**.\n2. Select Sample file **[MessageTracking Sample](https://aka.ms/Sentinel-Sample-ESI-MessageTrackingLogsSampleCSV)** and click Next\n3. Select type **Windows** and enter the path **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\TransportRoles\\Logs\\MessageTracking\\*.log**. Click Next.\n4. Enter **MessageTrackingLog** as Table name and click Next.\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 7] HTTP Proxy of Exchange Servers"", ""description"": ""Select how to stream HTTP Proxy of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Message Tracking are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""text"": ""**Attention**, Custom logs in Monitor Agent is in Preview. 
The deployment doesn't work as expected for the moment (March 2023)."", ""inline"": false}, ""type"": ""InfoMessage""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption7-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE. \n3. 
'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create Custom DCR Table"", ""description"": ""1. Download the Example file from [Microsoft Sentinel GitHub](https://aka.ms/Sentinel-Sample-ESI-HTTPProxyExampleFile).\n2. From the Azure Portal, navigate to [Workspace Analytics](https://portal.azure.com/#view/HubsExtension/BrowseResource/resourceType/Microsoft.OperationalInsights%2Fworkspaces) and select your target Workspace.\n3. Click in 'Tables', click **+ Create** at the top and select **New Custom log (DCR-Based)**.\n4. In the **Basics** tab, enter **ExchangeHttpProxy** on the Table name, create a Data Collection rule with the name **DCR-Option7-HTTPProxyLogs** (for example) and select the previously created Data collection Endpoint.\n5. In the **Schema and Transformation** tab, choose the downloaded sample file and click on **Transformation Editor**.\n6. In the transformation field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(DateTime)\n| project-away DateTime\n*\n\n8. Click 'Run' and after 'Apply'.\n9. Click **Next**, then click **Create**.""}, {""title"": ""C. Modify the created DCR, Type Custom log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Select the previously created DCR, like **DCR-Option7-HTTPProxyLogs**.\n3. In the **Resources** tab, enter you Exchange Servers.\n4. In **Data Sources**, add a Data Source type 'Custom Text logs' and enter 'C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Autodiscover\\*.log' in file pattern, 'ExchangeHttpProxy_CL' in Table Name.\n6.in Transform field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(DateTime)\n| project-away DateTime* \n7. 
Click on 'Add data source'.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\n1. Under workspace **Settings** part, select **Tables**, click **+ Create** and click on **New custom log (MMA-Based)**.\n2. Select Sample file **[MessageTracking Sample](https://aka.ms/Sentinel-Sample-ESI-HttpProxySampleCSV)** and click Next\n3. Select type **Windows** and enter all the following paths **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Autodiscover\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Eas\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Ecp\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Ews\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Mapi\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Oab\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Owa\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\OwaCalendar\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\PowerShell\\*.log** and **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\RpcHttp\\*.log** . Click Next.\n4. Enter **ExchangeHttpProxy** as Table name and click Next.\n5. 
Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. Parsers are automatically deployed with the solution. Follow the steps to create the Kusto Functions alias : [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)"", ""instructions"": [{""parameters"": {""title"": ""Parsers are automatically deployed during Solution deployment. If you want to deploy manually, follow the steps below"", ""instructionSteps"": [{""title"": ""Manual Parser Deployment"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""1. Download the Parser file"", ""description"": ""The latest version of the file [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)""}, {""title"": ""2. Create Parser **ExchangeAdminAuditLogs** function"", ""description"": ""In 'Logs' explorer of your Microsoft Sentinel's log analytics, copy the content of the file to Log explorer""}, {""title"": ""3. Save Parser **ExchangeAdminAuditLogs** function"", ""description"": ""Click on save button.\n No parameter is needed for this parser.\nClick save again.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""Azure Log Analytics will be deprecated, to collect data from non-Azure VMs, Azure Arc is recommended. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""name"": ""Detailled documentation"", ""description"": "">**NOTE:** Detailled documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-ExchangeAdminAuditLogEvents.json","true" -"MessageTrackingLog_CL","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-ExchangeAdminAuditLogEvents","Microsoft","[Deprecated] Microsoft Exchange Logs and Events","Deprecated, use the 'ESI-Opt' dataconnectors. You can stream all Exchange Audit events, IIS Logs, HTTP Proxy logs and Security Event logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to view dashboards, create custom alerts, and improve investigation. 
This is used by Microsoft Exchange Security Workbooks to provide security insights of your On-Premises Exchange environment","[{""description"": "">**NOTE:** This solution is based on options. This allows you to choose which data will be ingest as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each options are independant for one from the other. To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions)""}, {""title"": ""1. Download and install the agents needed to collect logs for Microsoft Sentinel"", ""description"": ""Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Deploy Monitor Agents"", ""description"": ""This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers"", ""instructions"": [{""parameters"": {""title"": ""Select which agent you want to install in your servers to collect logs:"", ""instructionSteps"": [{""title"": ""[Prefered] Azure Monitor Agent via Azure Arc"", ""description"": ""**Deploy the Azure Arc Agent**\n> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""title"": ""Install Azure Log Analytics Agent (Deprecated on 31/08/2024)"", ""description"": ""1. Download the Azure Log Analytics Agent and choose the deployment method in the below link."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Deploy log injestion following choosed options"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""[Option 1] MS Exchange Management Log collection"", ""description"": ""Select how to stream MS Exchange Admin Audit event logs"", ""instructions"": [{""parameters"": {""title"": ""MS Exchange Admin Audit event logs"", ""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Microsoft Exchange Admin Audit Events logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCR.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption1-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCR, Type Event log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. 
In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Custom' option, enter 'MSExchange Management' as expression and Add it.\n6. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace **Legacy agents management**, select **Windows Event logs**.\n2. Click **Add Windows event log** and enter **MSExchange Management** as log name.\n3. Collect Error, Warning and Information types\n4. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 2] Security/Application/System logs of Exchange Servers"", ""description"": ""Select how to stream Security/Application/System logs of Exchange Servers"", ""instructions"": [{""parameters"": {""title"": ""Security Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - Security Event logs"", ""description"": ""**Enable data collection rule for Security Logs**\nSecurity Events logs are collected only from **Windows** agents.\n1. Add Exchange Servers on *Resources* tab.\n2. Select Security log level\n\n> **Common level** is the minimum required. 
Please select 'Common' or 'All Security Events' on DCR definition."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 0}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""title"": ""Application and System Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Application and System Events logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCR.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCR, Type Event log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Basic' option.\n6. 
For Application, select 'Critical', 'Error' and 'Warning'. For System, select Critical/Error/Warning/Information. \n7. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Windows Event logs**.\n2. Click **Add Windows event log** and search **Application** as log name.\n3. Click **Add Windows event log** and search **System** as log name.\n4. Collect Error (for all), Warning (for all) and Information (for System) types\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 3 and 4] Security logs of Domain Controllers"", ""description"": ""Select how to stream Security logs of Domain Controllers. If you want to implement Option 3, you just need to select DC on same site as Exchange Servers. 
If you want to implement Option 4, you can select all DCs of your forest."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""[Option 3] List only Domain Controllers on the same site as Exchange Servers for next step"", ""description"": ""**This limits the quantity of data injested but some incident can't be detected.**""}, {""title"": ""[Option 4] List all Domain Controllers of your Active-Directory Forest for next step"", ""description"": ""**This allows collecting all security events**""}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""title"": ""Security Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - Security Event logs"", ""description"": ""**Enable data collection rule for Security Logs**\nSecurity Events logs are collected only from **Windows** agents.\n1. Add chosen DCs on *Resources* tab.\n2. Select Security log level\n\n> **Common level** is the minimum required. Please select 'Common' or 'All Security Events' on DCR definition."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 0}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 5] IIS logs of Exchange Servers"", ""description"": ""Select how to stream IIS logs of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> IIS logs are collected only from **Windows** agents."", ""instructions"": [{""type"": ""AdminAuditEvents""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. 
Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption5-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create DCR, Type IIS log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. 
In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. Select the created DCE. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'IIS logs' (Do not enter a path if IIS Logs path is configured by default). Click on 'Add data source'\n6. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **IIS Logs**.\n2. Check **Collect W3C format IIS log files**\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 6] Message Tracking of Exchange Servers"", ""description"": ""Select how to stream Message Tracking of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Message Tracking are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""text"": ""**Attention**, Custom logs in Monitor Agent is in Preview. 
The deployment doesn't work as expected for the moment (March 2023)."", ""inline"": false}, ""type"": ""InfoMessage""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule and Custom Table"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption6-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. 
In the **Basics** tab, fill the required fields and give a name to the DCE, like ESI-ExchangeServers. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create Custom DCR Table"", ""description"": ""1. Download the Example file from [Microsoft Sentinel GitHub](https://aka.ms/Sentinel-Sample-ESI-MessageTrackingExampleFile).\n2. From the Azure Portal, navigate to [Workspace Analytics](https://portal.azure.com/#view/HubsExtension/BrowseResource/resourceType/Microsoft.OperationalInsights%2Fworkspaces) and select your target Workspace.\n3. Click in 'Tables', click **+ Create** at the top and select **New Custom log (DCR-Based)**.\n4. In the **Basics** tab, enter **MessageTrackingLog** on the Table name, create a Data Collection rule with the name **DCR-Option6-MessageTrackingLogs** (for example) and select the previously created Data collection Endpoint.\n5. In the **Schema and Transformation** tab, choose the downloaded sample file and click on **Transformation Editor**.\n6. 
In the transformation field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(['date-time'])\n| extend\n clientHostname = ['client-hostname'],\n clientIP = ['client-ip'],\n connectorId = ['connector-id'],\n customData = ['custom-data'],\n eventId = ['event-id'],\n internalMessageId = ['internal-message-id'],\n logId = ['log-id'],\n messageId = ['message-id'],\n messageInfo = ['message-info'],\n messageSubject = ['message-subject'],\n networkMessageId = ['network-message-id'],\n originalClientIp = ['original-client-ip'],\n originalServerIp = ['original-server-ip'],\n recipientAddress= ['recipient-address'],\n recipientCount= ['recipient-count'],\n recipientStatus= ['recipient-status'],\n relatedRecipientAddress= ['related-recipient-address'],\n returnPath= ['return-path'],\n senderAddress= ['sender-address'],\n senderHostname= ['server-hostname'],\n serverIp= ['server-ip'],\n sourceContext= ['source-context'],\n schemaVersion=['schema-version'],\n messageTrackingTenantId = ['tenant-id'],\n totalBytes = ['total-bytes'],\n transportTrafficType = ['transport-traffic-type']\n| project-away\n ['client-ip'],\n ['client-hostname'],\n ['connector-id'],\n ['custom-data'],\n ['date-time'],\n ['event-id'],\n ['internal-message-id'],\n ['log-id'],\n ['message-id'],\n ['message-info'],\n ['message-subject'],\n ['network-message-id'],\n ['original-client-ip'],\n ['original-server-ip'],\n ['recipient-address'],\n ['recipient-count'],\n ['recipient-status'],\n ['related-recipient-address'],\n ['return-path'],\n ['sender-address'],\n ['server-hostname'],\n ['server-ip'],\n ['source-context'],\n ['schema-version'],\n ['tenant-id'],\n ['total-bytes'],\n ['transport-traffic-type']*\n\n8. Click 'Run' and after 'Apply'.\n9. Click **Next**, then click **Create**.""}, {""title"": ""C. Modify the created DCR, Type Custom log"", ""description"": ""1. 
From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Select the previously created DCR, like **DCR-Option6-MessageTrackingLogs**.\n3. In the **Resources** tab, enter you Exchange Servers.\n4. In **Data Sources**, add a Data Source type 'Custom Text logs' and enter 'C:\\Program Files\\Microsoft\\Exchange Server\\V15\\TransportRoles\\Logs\\MessageTracking\\*.log' in file pattern, 'MessageTrackingLog_CL' in Table Name.\n6.in Transform field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(['date-time'])\n| extend\n clientHostname = ['client-hostname'],\n clientIP = ['client-ip'],\n connectorId = ['connector-id'],\n customData = ['custom-data'],\n eventId = ['event-id'],\n internalMessageId = ['internal-message-id'],\n logId = ['log-id'],\n messageId = ['message-id'],\n messageInfo = ['message-info'],\n messageSubject = ['message-subject'],\n networkMessageId = ['network-message-id'],\n originalClientIp = ['original-client-ip'],\n originalServerIp = ['original-server-ip'],\n recipientAddress= ['recipient-address'],\n recipientCount= ['recipient-count'],\n recipientStatus= ['recipient-status'],\n relatedRecipientAddress= ['related-recipient-address'],\n returnPath= ['return-path'],\n senderAddress= ['sender-address'],\n senderHostname= ['server-hostname'],\n serverIp= ['server-ip'],\n sourceContext= ['source-context'],\n schemaVersion=['schema-version'],\n messageTrackingTenantId = ['tenant-id'],\n totalBytes = ['total-bytes'],\n transportTrafficType = ['transport-traffic-type']\n| project-away\n ['client-ip'],\n ['client-hostname'],\n ['connector-id'],\n ['custom-data'],\n ['date-time'],\n ['event-id'],\n ['internal-message-id'],\n ['log-id'],\n ['message-id'],\n ['message-info'],\n ['message-subject'],\n ['network-message-id'],\n ['original-client-ip'],\n ['original-server-ip'],\n ['recipient-address'],\n 
['recipient-count'],\n ['recipient-status'],\n ['related-recipient-address'],\n ['return-path'],\n ['sender-address'],\n ['server-hostname'],\n ['server-ip'],\n ['source-context'],\n ['schema-version'],\n ['tenant-id'],\n ['total-bytes'],\n ['transport-traffic-type']* \n7. Click on 'Add data source'.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\n1. Under workspace **Settings** part, select **Tables**, click **+ Create** and click on **New custom log (MMA-Based)**.\n2. Select Sample file **[MessageTracking Sample](https://aka.ms/Sentinel-Sample-ESI-MessageTrackingLogsSampleCSV)** and click Next\n3. Select type **Windows** and enter the path **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\TransportRoles\\Logs\\MessageTracking\\*.log**. Click Next.\n4. Enter **MessageTrackingLog** as Table name and click Next.\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 7] HTTP Proxy of Exchange Servers"", ""description"": ""Select how to stream HTTP Proxy of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Message Tracking are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""text"": ""**Attention**, Custom logs in Monitor Agent is in Preview. 
The deployment doesn't work as expected for the moment (March 2023)."", ""inline"": false}, ""type"": ""InfoMessage""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption7-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE. \n3. 
'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create Custom DCR Table"", ""description"": ""1. Download the Example file from [Microsoft Sentinel GitHub](https://aka.ms/Sentinel-Sample-ESI-HTTPProxyExampleFile).\n2. From the Azure Portal, navigate to [Workspace Analytics](https://portal.azure.com/#view/HubsExtension/BrowseResource/resourceType/Microsoft.OperationalInsights%2Fworkspaces) and select your target Workspace.\n3. Click in 'Tables', click **+ Create** at the top and select **New Custom log (DCR-Based)**.\n4. In the **Basics** tab, enter **ExchangeHttpProxy** on the Table name, create a Data Collection rule with the name **DCR-Option7-HTTPProxyLogs** (for example) and select the previously created Data collection Endpoint.\n5. In the **Schema and Transformation** tab, choose the downloaded sample file and click on **Transformation Editor**.\n6. In the transformation field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(DateTime)\n| project-away DateTime\n*\n\n8. Click 'Run' and after 'Apply'.\n9. Click **Next**, then click **Create**.""}, {""title"": ""C. Modify the created DCR, Type Custom log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Select the previously created DCR, like **DCR-Option7-HTTPProxyLogs**.\n3. In the **Resources** tab, enter you Exchange Servers.\n4. In **Data Sources**, add a Data Source type 'Custom Text logs' and enter 'C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Autodiscover\\*.log' in file pattern, 'ExchangeHttpProxy_CL' in Table Name.\n6.in Transform field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(DateTime)\n| project-away DateTime* \n7. 
Click on 'Add data source'.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\n1. Under workspace **Settings** part, select **Tables**, click **+ Create** and click on **New custom log (MMA-Based)**.\n2. Select Sample file **[MessageTracking Sample](https://aka.ms/Sentinel-Sample-ESI-HttpProxySampleCSV)** and click Next\n3. Select type **Windows** and enter all the following paths **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Autodiscover\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Eas\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Ecp\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Ews\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Mapi\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Oab\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Owa\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\OwaCalendar\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\PowerShell\\*.log** and **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\RpcHttp\\*.log** . Click Next.\n4. Enter **ExchangeHttpProxy** as Table name and click Next.\n5. 
Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. Parsers are automatically deployed with the solution. Follow the steps to create the Kusto Functions alias : [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)"", ""instructions"": [{""parameters"": {""title"": ""Parsers are automatically deployed during Solution deployment. If you want to deploy manually, follow the steps below"", ""instructionSteps"": [{""title"": ""Manual Parser Deployment"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""1. Download the Parser file"", ""description"": ""The latest version of the file [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)""}, {""title"": ""2. Create Parser **ExchangeAdminAuditLogs** function"", ""description"": ""In 'Logs' explorer of your Microsoft Sentinel's log analytics, copy the content of the file to Log explorer""}, {""title"": ""3. Save Parser **ExchangeAdminAuditLogs** function"", ""description"": ""Click on save button.\n No parameter is needed for this parser.\nClick save again.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""Azure Log Analytics will be deprecated, to collect data from non-Azure VMs, Azure Arc is recommended. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""name"": ""Detailled documentation"", ""description"": "">**NOTE:** Detailled documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-ExchangeAdminAuditLogEvents.json","true" -"SecurityEvent","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-ExchangeAdminAuditLogEvents","Microsoft","[Deprecated] Microsoft Exchange Logs and Events","Deprecated, use the 'ESI-Opt' dataconnectors. You can stream all Exchange Audit events, IIS Logs, HTTP Proxy logs and Security Event logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to view dashboards, create custom alerts, and improve investigation. This is used by Microsoft Exchange Security Workbooks to provide security insights of your On-Premises Exchange environment","[{""description"": "">**NOTE:** This solution is based on options. 
This allows you to choose which data will be ingest as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each options are independant for one from the other. To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions)""}, {""title"": ""1. Download and install the agents needed to collect logs for Microsoft Sentinel"", ""description"": ""Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Deploy Monitor Agents"", ""description"": ""This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers"", ""instructions"": [{""parameters"": {""title"": ""Select which agent you want to install in your servers to collect logs:"", ""instructionSteps"": [{""title"": ""[Prefered] Azure Monitor Agent via Azure Arc"", ""description"": ""**Deploy the Azure Arc Agent**\n> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""title"": ""Install Azure Log Analytics Agent (Deprecated on 31/08/2024)"", ""description"": ""1. Download the Azure Log Analytics Agent and choose the deployment method in the below link."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Deploy log injestion following choosed options"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""[Option 1] MS Exchange Management Log collection"", ""description"": ""Select how to stream MS Exchange Admin Audit event logs"", ""instructions"": [{""parameters"": {""title"": ""MS Exchange Admin Audit event logs"", ""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Microsoft Exchange Admin Audit Events logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCR.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption1-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCR, Type Event log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. 
In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Custom' option, enter 'MSExchange Management' as expression and Add it.\n6. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace **Legacy agents management**, select **Windows Event logs**.\n2. Click **Add Windows event log** and enter **MSExchange Management** as log name.\n3. Collect Error, Warning and Information types\n4. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 2] Security/Application/System logs of Exchange Servers"", ""description"": ""Select how to stream Security/Application/System logs of Exchange Servers"", ""instructions"": [{""parameters"": {""title"": ""Security Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - Security Event logs"", ""description"": ""**Enable data collection rule for Security Logs**\nSecurity Events logs are collected only from **Windows** agents.\n1. Add Exchange Servers on *Resources* tab.\n2. Select Security log level\n\n> **Common level** is the minimum required. 
Please select 'Common' or 'All Security Events' on DCR definition."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 0}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""title"": ""Application and System Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Application and System Events logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCR.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCR, Type Event log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Basic' option.\n6. 
For Application, select 'Critical', 'Error' and 'Warning'. For System, select Critical/Error/Warning/Information. \n7. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Windows Event logs**.\n2. Click **Add Windows event log** and search **Application** as log name.\n3. Click **Add Windows event log** and search **System** as log name.\n4. Collect Error (for all), Warning (for all) and Information (for System) types\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 3 and 4] Security logs of Domain Controllers"", ""description"": ""Select how to stream Security logs of Domain Controllers. If you want to implement Option 3, you just need to select DC on same site as Exchange Servers. 
If you want to implement Option 4, you can select all DCs of your forest."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""[Option 3] List only Domain Controllers on the same site as Exchange Servers for next step"", ""description"": ""**This limits the quantity of data injested but some incident can't be detected.**""}, {""title"": ""[Option 4] List all Domain Controllers of your Active-Directory Forest for next step"", ""description"": ""**This allows collecting all security events**""}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""title"": ""Security Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - Security Event logs"", ""description"": ""**Enable data collection rule for Security Logs**\nSecurity Events logs are collected only from **Windows** agents.\n1. Add chosen DCs on *Resources* tab.\n2. Select Security log level\n\n> **Common level** is the minimum required. Please select 'Common' or 'All Security Events' on DCR definition."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 0}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 5] IIS logs of Exchange Servers"", ""description"": ""Select how to stream IIS logs of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> IIS logs are collected only from **Windows** agents."", ""instructions"": [{""type"": ""AdminAuditEvents""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. 
Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption5-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create DCR, Type IIS log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. 
In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. Select the created DCE. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'IIS logs' (Do not enter a path if IIS Logs path is configured by default). Click on 'Add data source'\n6. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **IIS Logs**.\n2. Check **Collect W3C format IIS log files**\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 6] Message Tracking of Exchange Servers"", ""description"": ""Select how to stream Message Tracking of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Message Tracking are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""text"": ""**Attention**, Custom logs in Monitor Agent is in Preview. 
The deployment doesn't work as expected for the moment (March 2023)."", ""inline"": false}, ""type"": ""InfoMessage""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule and Custom Table"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption6-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. 
In the **Basics** tab, fill the required fields and give a name to the DCE, like ESI-ExchangeServers. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create Custom DCR Table"", ""description"": ""1. Download the Example file from [Microsoft Sentinel GitHub](https://aka.ms/Sentinel-Sample-ESI-MessageTrackingExampleFile).\n2. From the Azure Portal, navigate to [Workspace Analytics](https://portal.azure.com/#view/HubsExtension/BrowseResource/resourceType/Microsoft.OperationalInsights%2Fworkspaces) and select your target Workspace.\n3. Click in 'Tables', click **+ Create** at the top and select **New Custom log (DCR-Based)**.\n4. In the **Basics** tab, enter **MessageTrackingLog** on the Table name, create a Data Collection rule with the name **DCR-Option6-MessageTrackingLogs** (for example) and select the previously created Data collection Endpoint.\n5. In the **Schema and Transformation** tab, choose the downloaded sample file and click on **Transformation Editor**.\n6. 
In the transformation field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(['date-time'])\n| extend\n clientHostname = ['client-hostname'],\n clientIP = ['client-ip'],\n connectorId = ['connector-id'],\n customData = ['custom-data'],\n eventId = ['event-id'],\n internalMessageId = ['internal-message-id'],\n logId = ['log-id'],\n messageId = ['message-id'],\n messageInfo = ['message-info'],\n messageSubject = ['message-subject'],\n networkMessageId = ['network-message-id'],\n originalClientIp = ['original-client-ip'],\n originalServerIp = ['original-server-ip'],\n recipientAddress= ['recipient-address'],\n recipientCount= ['recipient-count'],\n recipientStatus= ['recipient-status'],\n relatedRecipientAddress= ['related-recipient-address'],\n returnPath= ['return-path'],\n senderAddress= ['sender-address'],\n senderHostname= ['server-hostname'],\n serverIp= ['server-ip'],\n sourceContext= ['source-context'],\n schemaVersion=['schema-version'],\n messageTrackingTenantId = ['tenant-id'],\n totalBytes = ['total-bytes'],\n transportTrafficType = ['transport-traffic-type']\n| project-away\n ['client-ip'],\n ['client-hostname'],\n ['connector-id'],\n ['custom-data'],\n ['date-time'],\n ['event-id'],\n ['internal-message-id'],\n ['log-id'],\n ['message-id'],\n ['message-info'],\n ['message-subject'],\n ['network-message-id'],\n ['original-client-ip'],\n ['original-server-ip'],\n ['recipient-address'],\n ['recipient-count'],\n ['recipient-status'],\n ['related-recipient-address'],\n ['return-path'],\n ['sender-address'],\n ['server-hostname'],\n ['server-ip'],\n ['source-context'],\n ['schema-version'],\n ['tenant-id'],\n ['total-bytes'],\n ['transport-traffic-type']*\n\n8. Click 'Run' and after 'Apply'.\n9. Click **Next**, then click **Create**.""}, {""title"": ""C. Modify the created DCR, Type Custom log"", ""description"": ""1. 
From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Select the previously created DCR, like **DCR-Option6-MessageTrackingLogs**.\n3. In the **Resources** tab, enter you Exchange Servers.\n4. In **Data Sources**, add a Data Source type 'Custom Text logs' and enter 'C:\\Program Files\\Microsoft\\Exchange Server\\V15\\TransportRoles\\Logs\\MessageTracking\\*.log' in file pattern, 'MessageTrackingLog_CL' in Table Name.\n6.in Transform field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(['date-time'])\n| extend\n clientHostname = ['client-hostname'],\n clientIP = ['client-ip'],\n connectorId = ['connector-id'],\n customData = ['custom-data'],\n eventId = ['event-id'],\n internalMessageId = ['internal-message-id'],\n logId = ['log-id'],\n messageId = ['message-id'],\n messageInfo = ['message-info'],\n messageSubject = ['message-subject'],\n networkMessageId = ['network-message-id'],\n originalClientIp = ['original-client-ip'],\n originalServerIp = ['original-server-ip'],\n recipientAddress= ['recipient-address'],\n recipientCount= ['recipient-count'],\n recipientStatus= ['recipient-status'],\n relatedRecipientAddress= ['related-recipient-address'],\n returnPath= ['return-path'],\n senderAddress= ['sender-address'],\n senderHostname= ['server-hostname'],\n serverIp= ['server-ip'],\n sourceContext= ['source-context'],\n schemaVersion=['schema-version'],\n messageTrackingTenantId = ['tenant-id'],\n totalBytes = ['total-bytes'],\n transportTrafficType = ['transport-traffic-type']\n| project-away\n ['client-ip'],\n ['client-hostname'],\n ['connector-id'],\n ['custom-data'],\n ['date-time'],\n ['event-id'],\n ['internal-message-id'],\n ['log-id'],\n ['message-id'],\n ['message-info'],\n ['message-subject'],\n ['network-message-id'],\n ['original-client-ip'],\n ['original-server-ip'],\n ['recipient-address'],\n 
['recipient-count'],\n ['recipient-status'],\n ['related-recipient-address'],\n ['return-path'],\n ['sender-address'],\n ['server-hostname'],\n ['server-ip'],\n ['source-context'],\n ['schema-version'],\n ['tenant-id'],\n ['total-bytes'],\n ['transport-traffic-type']* \n7. Click on 'Add data source'.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\n1. Under workspace **Settings** part, select **Tables**, click **+ Create** and click on **New custom log (MMA-Based)**.\n2. Select Sample file **[MessageTracking Sample](https://aka.ms/Sentinel-Sample-ESI-MessageTrackingLogsSampleCSV)** and click Next\n3. Select type **Windows** and enter the path **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\TransportRoles\\Logs\\MessageTracking\\*.log**. Click Next.\n4. Enter **MessageTrackingLog** as Table name and click Next.\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 7] HTTP Proxy of Exchange Servers"", ""description"": ""Select how to stream HTTP Proxy of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Message Tracking are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""text"": ""**Attention**, Custom logs in Monitor Agent is in Preview. 
The deployment doesn't work as expected for the moment (March 2023)."", ""inline"": false}, ""type"": ""InfoMessage""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption7-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE. \n3. 
'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create Custom DCR Table"", ""description"": ""1. Download the Example file from [Microsoft Sentinel GitHub](https://aka.ms/Sentinel-Sample-ESI-HTTPProxyExampleFile).\n2. From the Azure Portal, navigate to [Workspace Analytics](https://portal.azure.com/#view/HubsExtension/BrowseResource/resourceType/Microsoft.OperationalInsights%2Fworkspaces) and select your target Workspace.\n3. Click in 'Tables', click **+ Create** at the top and select **New Custom log (DCR-Based)**.\n4. In the **Basics** tab, enter **ExchangeHttpProxy** on the Table name, create a Data Collection rule with the name **DCR-Option7-HTTPProxyLogs** (for example) and select the previously created Data collection Endpoint.\n5. In the **Schema and Transformation** tab, choose the downloaded sample file and click on **Transformation Editor**.\n6. In the transformation field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(DateTime)\n| project-away DateTime\n*\n\n8. Click 'Run' and after 'Apply'.\n9. Click **Next**, then click **Create**.""}, {""title"": ""C. Modify the created DCR, Type Custom log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Select the previously created DCR, like **DCR-Option7-HTTPProxyLogs**.\n3. In the **Resources** tab, enter you Exchange Servers.\n4. In **Data Sources**, add a Data Source type 'Custom Text logs' and enter 'C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Autodiscover\\*.log' in file pattern, 'ExchangeHttpProxy_CL' in Table Name.\n6.in Transform field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(DateTime)\n| project-away DateTime* \n7. 
Click on 'Add data source'.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\n1. Under workspace **Settings** part, select **Tables**, click **+ Create** and click on **New custom log (MMA-Based)**.\n2. Select Sample file **[MessageTracking Sample](https://aka.ms/Sentinel-Sample-ESI-HttpProxySampleCSV)** and click Next\n3. Select type **Windows** and enter all the following paths **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Autodiscover\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Eas\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Ecp\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Ews\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Mapi\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Oab\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Owa\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\OwaCalendar\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\PowerShell\\*.log** and **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\RpcHttp\\*.log** . Click Next.\n4. Enter **ExchangeHttpProxy** as Table name and click Next.\n5. 
Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. Parsers are automatically deployed with the solution. Follow the steps to create the Kusto Functions alias : [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)"", ""instructions"": [{""parameters"": {""title"": ""Parsers are automatically deployed during Solution deployment. If you want to deploy manually, follow the steps below"", ""instructionSteps"": [{""title"": ""Manual Parser Deployment"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""1. Download the Parser file"", ""description"": ""The latest version of the file [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)""}, {""title"": ""2. Create Parser **ExchangeAdminAuditLogs** function"", ""description"": ""In 'Logs' explorer of your Microsoft Sentinel's log analytics, copy the content of the file to Log explorer""}, {""title"": ""3. Save Parser **ExchangeAdminAuditLogs** function"", ""description"": ""Click on save button.\n No parameter is needed for this parser.\nClick save again.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""Azure Log Analytics will be deprecated, to collect data from non-Azure VMs, Azure Arc is recommended. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""name"": ""Detailled documentation"", ""description"": "">**NOTE:** Detailled documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-ExchangeAdminAuditLogEvents.json","true" -"W3CIISLog","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-ExchangeAdminAuditLogEvents","Microsoft","[Deprecated] Microsoft Exchange Logs and Events","Deprecated, use the 'ESI-Opt' dataconnectors. You can stream all Exchange Audit events, IIS Logs, HTTP Proxy logs and Security Event logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to view dashboards, create custom alerts, and improve investigation. This is used by Microsoft Exchange Security Workbooks to provide security insights of your On-Premises Exchange environment","[{""description"": "">**NOTE:** This solution is based on options. 
This allows you to choose which data will be ingest as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each options are independant for one from the other. To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions)""}, {""title"": ""1. Download and install the agents needed to collect logs for Microsoft Sentinel"", ""description"": ""Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Deploy Monitor Agents"", ""description"": ""This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers"", ""instructions"": [{""parameters"": {""title"": ""Select which agent you want to install in your servers to collect logs:"", ""instructionSteps"": [{""title"": ""[Prefered] Azure Monitor Agent via Azure Arc"", ""description"": ""**Deploy the Azure Arc Agent**\n> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""title"": ""Install Azure Log Analytics Agent (Deprecated on 31/08/2024)"", ""description"": ""1. Download the Azure Log Analytics Agent and choose the deployment method in the below link."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Deploy log injestion following choosed options"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""[Option 1] MS Exchange Management Log collection"", ""description"": ""Select how to stream MS Exchange Admin Audit event logs"", ""instructions"": [{""parameters"": {""title"": ""MS Exchange Admin Audit event logs"", ""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Microsoft Exchange Admin Audit Events logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCR.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption1-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCR, Type Event log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. 
In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Custom' option, enter 'MSExchange Management' as expression and Add it.\n6. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace **Legacy agents management**, select **Windows Event logs**.\n2. Click **Add Windows event log** and enter **MSExchange Management** as log name.\n3. Collect Error, Warning and Information types\n4. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 2] Security/Application/System logs of Exchange Servers"", ""description"": ""Select how to stream Security/Application/System logs of Exchange Servers"", ""instructions"": [{""parameters"": {""title"": ""Security Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - Security Event logs"", ""description"": ""**Enable data collection rule for Security Logs**\nSecurity Events logs are collected only from **Windows** agents.\n1. Add Exchange Servers on *Resources* tab.\n2. Select Security log level\n\n> **Common level** is the minimum required. 
Please select 'Common' or 'All Security Events' on DCR definition."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 0}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""title"": ""Application and System Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Application and System Events logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCR.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCR, Type Event log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Basic' option.\n6. 
For Application, select 'Critical', 'Error' and 'Warning'. For System, select Critical/Error/Warning/Information. \n7. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Windows Event logs**.\n2. Click **Add Windows event log** and search **Application** as log name.\n3. Click **Add Windows event log** and search **System** as log name.\n4. Collect Error (for all), Warning (for all) and Information (for System) types\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 3 and 4] Security logs of Domain Controllers"", ""description"": ""Select how to stream Security logs of Domain Controllers. If you want to implement Option 3, you just need to select DC on same site as Exchange Servers. 
If you want to implement Option 4, you can select all DCs of your forest."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""[Option 3] List only Domain Controllers on the same site as Exchange Servers for next step"", ""description"": ""**This limits the quantity of data injested but some incident can't be detected.**""}, {""title"": ""[Option 4] List all Domain Controllers of your Active-Directory Forest for next step"", ""description"": ""**This allows collecting all security events**""}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""title"": ""Security Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - Security Event logs"", ""description"": ""**Enable data collection rule for Security Logs**\nSecurity Events logs are collected only from **Windows** agents.\n1. Add chosen DCs on *Resources* tab.\n2. Select Security log level\n\n> **Common level** is the minimum required. Please select 'Common' or 'All Security Events' on DCR definition."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 0}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 5] IIS logs of Exchange Servers"", ""description"": ""Select how to stream IIS logs of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> IIS logs are collected only from **Windows** agents."", ""instructions"": [{""type"": ""AdminAuditEvents""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. 
Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption5-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create DCR, Type IIS log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. 
In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. Select the created DCE. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'IIS logs' (Do not enter a path if IIS Logs path is configured by default). Click on 'Add data source'\n6. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **IIS Logs**.\n2. Check **Collect W3C format IIS log files**\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 6] Message Tracking of Exchange Servers"", ""description"": ""Select how to stream Message Tracking of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Message Tracking are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""text"": ""**Attention**, Custom logs in Monitor Agent is in Preview. 
The deployment doesn't work as expected for the moment (March 2023)."", ""inline"": false}, ""type"": ""InfoMessage""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule and Custom Table"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption6-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. 
In the **Basics** tab, fill the required fields and give a name to the DCE, like ESI-ExchangeServers. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create Custom DCR Table"", ""description"": ""1. Download the Example file from [Microsoft Sentinel GitHub](https://aka.ms/Sentinel-Sample-ESI-MessageTrackingExampleFile).\n2. From the Azure Portal, navigate to [Workspace Analytics](https://portal.azure.com/#view/HubsExtension/BrowseResource/resourceType/Microsoft.OperationalInsights%2Fworkspaces) and select your target Workspace.\n3. Click in 'Tables', click **+ Create** at the top and select **New Custom log (DCR-Based)**.\n4. In the **Basics** tab, enter **MessageTrackingLog** on the Table name, create a Data Collection rule with the name **DCR-Option6-MessageTrackingLogs** (for example) and select the previously created Data collection Endpoint.\n5. In the **Schema and Transformation** tab, choose the downloaded sample file and click on **Transformation Editor**.\n6. 
In the transformation field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(['date-time'])\n| extend\n clientHostname = ['client-hostname'],\n clientIP = ['client-ip'],\n connectorId = ['connector-id'],\n customData = ['custom-data'],\n eventId = ['event-id'],\n internalMessageId = ['internal-message-id'],\n logId = ['log-id'],\n messageId = ['message-id'],\n messageInfo = ['message-info'],\n messageSubject = ['message-subject'],\n networkMessageId = ['network-message-id'],\n originalClientIp = ['original-client-ip'],\n originalServerIp = ['original-server-ip'],\n recipientAddress= ['recipient-address'],\n recipientCount= ['recipient-count'],\n recipientStatus= ['recipient-status'],\n relatedRecipientAddress= ['related-recipient-address'],\n returnPath= ['return-path'],\n senderAddress= ['sender-address'],\n senderHostname= ['server-hostname'],\n serverIp= ['server-ip'],\n sourceContext= ['source-context'],\n schemaVersion=['schema-version'],\n messageTrackingTenantId = ['tenant-id'],\n totalBytes = ['total-bytes'],\n transportTrafficType = ['transport-traffic-type']\n| project-away\n ['client-ip'],\n ['client-hostname'],\n ['connector-id'],\n ['custom-data'],\n ['date-time'],\n ['event-id'],\n ['internal-message-id'],\n ['log-id'],\n ['message-id'],\n ['message-info'],\n ['message-subject'],\n ['network-message-id'],\n ['original-client-ip'],\n ['original-server-ip'],\n ['recipient-address'],\n ['recipient-count'],\n ['recipient-status'],\n ['related-recipient-address'],\n ['return-path'],\n ['sender-address'],\n ['server-hostname'],\n ['server-ip'],\n ['source-context'],\n ['schema-version'],\n ['tenant-id'],\n ['total-bytes'],\n ['transport-traffic-type']*\n\n8. Click 'Run' and after 'Apply'.\n9. Click **Next**, then click **Create**.""}, {""title"": ""C. Modify the created DCR, Type Custom log"", ""description"": ""1. 
From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Select the previously created DCR, like **DCR-Option6-MessageTrackingLogs**.\n3. In the **Resources** tab, enter you Exchange Servers.\n4. In **Data Sources**, add a Data Source type 'Custom Text logs' and enter 'C:\\Program Files\\Microsoft\\Exchange Server\\V15\\TransportRoles\\Logs\\MessageTracking\\*.log' in file pattern, 'MessageTrackingLog_CL' in Table Name.\n6.in Transform field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(['date-time'])\n| extend\n clientHostname = ['client-hostname'],\n clientIP = ['client-ip'],\n connectorId = ['connector-id'],\n customData = ['custom-data'],\n eventId = ['event-id'],\n internalMessageId = ['internal-message-id'],\n logId = ['log-id'],\n messageId = ['message-id'],\n messageInfo = ['message-info'],\n messageSubject = ['message-subject'],\n networkMessageId = ['network-message-id'],\n originalClientIp = ['original-client-ip'],\n originalServerIp = ['original-server-ip'],\n recipientAddress= ['recipient-address'],\n recipientCount= ['recipient-count'],\n recipientStatus= ['recipient-status'],\n relatedRecipientAddress= ['related-recipient-address'],\n returnPath= ['return-path'],\n senderAddress= ['sender-address'],\n senderHostname= ['server-hostname'],\n serverIp= ['server-ip'],\n sourceContext= ['source-context'],\n schemaVersion=['schema-version'],\n messageTrackingTenantId = ['tenant-id'],\n totalBytes = ['total-bytes'],\n transportTrafficType = ['transport-traffic-type']\n| project-away\n ['client-ip'],\n ['client-hostname'],\n ['connector-id'],\n ['custom-data'],\n ['date-time'],\n ['event-id'],\n ['internal-message-id'],\n ['log-id'],\n ['message-id'],\n ['message-info'],\n ['message-subject'],\n ['network-message-id'],\n ['original-client-ip'],\n ['original-server-ip'],\n ['recipient-address'],\n 
['recipient-count'],\n ['recipient-status'],\n ['related-recipient-address'],\n ['return-path'],\n ['sender-address'],\n ['server-hostname'],\n ['server-ip'],\n ['source-context'],\n ['schema-version'],\n ['tenant-id'],\n ['total-bytes'],\n ['transport-traffic-type']* \n7. Click on 'Add data source'.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\n1. Under workspace **Settings** part, select **Tables**, click **+ Create** and click on **New custom log (MMA-Based)**.\n2. Select Sample file **[MessageTracking Sample](https://aka.ms/Sentinel-Sample-ESI-MessageTrackingLogsSampleCSV)** and click Next\n3. Select type **Windows** and enter the path **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\TransportRoles\\Logs\\MessageTracking\\*.log**. Click Next.\n4. Enter **MessageTrackingLog** as Table name and click Next.\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 7] HTTP Proxy of Exchange Servers"", ""description"": ""Select how to stream HTTP Proxy of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Message Tracking are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""text"": ""**Attention**, Custom logs in Monitor Agent is in Preview. 
The deployment doesn't work as expected for the moment (March 2023)."", ""inline"": false}, ""type"": ""InfoMessage""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption7-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE. \n3. 
'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create Custom DCR Table"", ""description"": ""1. Download the Example file from [Microsoft Sentinel GitHub](https://aka.ms/Sentinel-Sample-ESI-HTTPProxyExampleFile).\n2. From the Azure Portal, navigate to [Workspace Analytics](https://portal.azure.com/#view/HubsExtension/BrowseResource/resourceType/Microsoft.OperationalInsights%2Fworkspaces) and select your target Workspace.\n3. Click in 'Tables', click **+ Create** at the top and select **New Custom log (DCR-Based)**.\n4. In the **Basics** tab, enter **ExchangeHttpProxy** on the Table name, create a Data Collection rule with the name **DCR-Option7-HTTPProxyLogs** (for example) and select the previously created Data collection Endpoint.\n5. In the **Schema and Transformation** tab, choose the downloaded sample file and click on **Transformation Editor**.\n6. In the transformation field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(DateTime)\n| project-away DateTime\n*\n\n8. Click 'Run' and after 'Apply'.\n9. Click **Next**, then click **Create**.""}, {""title"": ""C. Modify the created DCR, Type Custom log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Select the previously created DCR, like **DCR-Option7-HTTPProxyLogs**.\n3. In the **Resources** tab, enter you Exchange Servers.\n4. In **Data Sources**, add a Data Source type 'Custom Text logs' and enter 'C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Autodiscover\\*.log' in file pattern, 'ExchangeHttpProxy_CL' in Table Name.\n6.in Transform field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(DateTime)\n| project-away DateTime* \n7. 
Click on 'Add data source'.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\n1. Under workspace **Settings** part, select **Tables**, click **+ Create** and click on **New custom log (MMA-Based)**.\n2. Select Sample file **[MessageTracking Sample](https://aka.ms/Sentinel-Sample-ESI-HttpProxySampleCSV)** and click Next\n3. Select type **Windows** and enter all the following paths **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Autodiscover\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Eas\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Ecp\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Ews\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Mapi\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Oab\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Owa\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\OwaCalendar\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\PowerShell\\*.log** and **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\RpcHttp\\*.log** . Click Next.\n4. Enter **ExchangeHttpProxy** as Table name and click Next.\n5. 
Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. Parsers are automatically deployed with the solution. Follow the steps to create the Kusto Functions alias : [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)"", ""instructions"": [{""parameters"": {""title"": ""Parsers are automatically deployed during Solution deployment. If you want to deploy manually, follow the steps below"", ""instructionSteps"": [{""title"": ""Manual Parser Deployment"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""1. Download the Parser file"", ""description"": ""The latest version of the file [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)""}, {""title"": ""2. Create Parser **ExchangeAdminAuditLogs** function"", ""description"": ""In 'Logs' explorer of your Microsoft Sentinel's log analytics, copy the content of the file to Log explorer""}, {""title"": ""3. Save Parser **ExchangeAdminAuditLogs** function"", ""description"": ""Click on save button.\n No parameter is needed for this parser.\nClick save again.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""Azure Log Analytics will be deprecated, to collect data from non-Azure VMs, Azure Arc is recommended. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""name"": ""Detailed documentation"", ""description"": "">**NOTE:** Detailed documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-ExchangeAdminAuditLogEvents.json","true" -"ESIExchangeConfig_CL","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-ExchangeOnPremisesCollector","Microsoft","Exchange Security Insights On-Premises Collector","Connector used to push Exchange On-Premises Security configuration for Microsoft Sentinel Analysis","[{""title"": ""1. 
Install the ESI Collector Script on a server with Exchange Admin PowerShell console"", ""description"": ""This is the script that will collect Exchange Information to push content in Microsoft Sentinel.\n "", ""instructions"": [{""parameters"": {""title"": ""Script Deployment"", ""instructionSteps"": [{""title"": ""Download the latest version of ESI Collector"", ""description"": ""The latest version can be found here : https://aka.ms/ESI-ExchangeCollector-Script. The file to download is CollectExchSecIns.zip""}, {""title"": ""Copy the script folder"", ""description"": ""Unzip the content and copy the script folder on a server where Exchange PowerShell Cmdlets are present.""}, {""title"": ""Unblock the PS1 Scripts"", ""description"": ""Click right on each PS1 Script and go to Properties tab.\n If the script is marked as blocked, unblock it. You can also use the Cmdlet 'Unblock-File *.* in the unzipped folder using PowerShell.""}, {""title"": ""Configure Network Access "", ""description"": ""Ensure that the script can contact Azure Analytics (*.ods.opinsights.azure.com).""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the ESI Collector Script"", ""description"": ""Be sure to be local administrator of the server.\nIn 'Run as Administrator' mode, launch the 'setup.ps1' script to configure the collector.\n Fill the Log Analytics (Microsoft Sentinel) Workspace information.\n Fill the Environment name or leave empty. By default, choose 'Def' as Default analysis. The other choices are for specific usage."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Schedule the ESI Collector Script (If not done by the Install Script due to lack of permission or ignored during installation)"", ""description"": ""The script needs to be scheduled to send Exchange configuration to Microsoft Sentinel.\n We recommend to schedule the script once a day.\n The account used to launch the Script needs to be member of the group Organization Management""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. Parsers are automatically deployed with the solution. Follow the steps to create the Kusto Functions alias : [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)"", ""instructions"": [{""parameters"": {""title"": ""Parsers are automatically deployed during Solution deployment. If you want to deploy manually, follow the steps below"", ""instructionSteps"": [{""title"": ""Manual Parser Deployment"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""1. Download the Parser file"", ""description"": ""The latest version of the file [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)""}, {""title"": ""2. Create Parser **ExchangeAdminAuditLogs** function"", ""description"": ""In 'Logs' explorer of your Microsoft Sentinel's log analytics, copy the content of the file to Log explorer""}, {""title"": ""3. 
Save Parser **ExchangeAdminAuditLogs** function"", ""description"": ""Click on save button.\n No parameter is needed for this parser.\nClick save again.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Service Account with Organization Management role"", ""description"": ""The service Account that launches the script as a scheduled task needs to be Organization Management to be able to retrieve all the needed security Information.""}, {""name"": ""Detailed documentation"", ""description"": "">**NOTE:** Detailed documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-ExchangeOnPremisesCollector.json","true" -"Event","Microsoft Exchange Security - Exchange 
On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-Opt1ExchangeAdminAuditLogsByEventLogs","Microsoft","Microsoft Exchange Admin Audit Logs by Event Logs","[Option 1] - Using Azure Monitor Agent - You can stream all Exchange Audit events from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to view dashboards, create custom alerts, and improve investigation. This is used by Microsoft Exchange Security Workbooks to provide security insights of your On-Premises Exchange environment","[{""description"": "">**NOTE:** This solution is based on options. This allows you to choose which data will be ingest as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each options are independant for one from the other. To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions)\n\n>This Data Connector is the **option 1** of the wiki.""}, {""title"": ""1. 
Download and install the agents needed to collect logs for Microsoft Sentinel"", ""description"": ""Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Deploy Monitor Agents"", ""description"": ""This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers\n**Deploy the Azure Arc Agent**\n> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. [Option 1] MS Exchange Management Log collection - MS Exchange Admin Audit event logs by Data Collection Rules"", ""description"": ""The MS Exchange Admin Audit event logs are collected using Data Collection Rules (DCR) and allow to store all Administrative Cmdlets executed in an Exchange environment."", ""instructions"": [{""parameters"": {""title"": ""DCR"", ""instructionSteps"": [{""title"": ""Data Collection Rules Deployment"", ""description"": ""**Enable data collection rule**\n> Microsoft Exchange Admin Audit Events logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template (Prefered)"", ""description"": ""Use this method for automated deployment of the DCR.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption1-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCR, Type Event log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Custom' option, enter 'MSExchange Management' as expression and Add it.\n6. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. Parsers are automatically deployed with the solution. Follow the steps to create the Kusto Functions alias : [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)"", ""instructions"": [{""parameters"": {""title"": ""Parsers are automatically deployed during Solution deployment. If you want to deploy manually, follow the steps below"", ""instructionSteps"": [{""title"": ""Manual Parser Deployment"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""1. 
Download the Parser file"", ""description"": ""The latest version of the file [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)""}, {""title"": ""2. Create Parser **ExchangeAdminAuditLogs** function"", ""description"": ""In 'Logs' explorer of your Microsoft Sentinel's log analytics, copy the content of the file to Log explorer""}, {""title"": ""3. Save Parser **ExchangeAdminAuditLogs** function"", ""description"": ""Click on save button.\n No parameter is needed for this parser.\nClick save again.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""Azure Log Analytics will be deprecated, to collect data from non-Azure VMs, Azure Arc is recommended. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""name"": ""Detailled documentation"", ""description"": "">**NOTE:** Detailled documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-Opt1ExchangeAdminAuditLogsByEventLogs.json","true" -"Event","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-Opt2ExchangeServersEventLogs","Microsoft","Microsoft Exchange Logs and Events","[Option 2] - Using Azure Monitor Agent - You can stream all Exchange Security & Application Event logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to create custom alerts, and improve investigation.","[{""description"": "">**NOTE:** This solution is based on options. This allows you to choose which data will be ingest as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each options are independant for one from the other. To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions)\n\n>This Data Connector is the **option 2** of the wiki.""}, {""title"": ""1. 
Download and install the agents needed to collect logs for Microsoft Sentinel"", ""description"": ""Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Deploy Monitor Agents"", ""description"": ""This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers\n**Deploy the Azure Arc Agent**\n> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. [Option 2] Security/Application/System logs of Exchange Servers"", ""description"": ""The Security/Application/System logs of Exchange Servers are collected using Data Collection Rules (DCR)."", ""instructions"": [{""parameters"": {""title"": ""Security Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - Security Event logs"", ""description"": ""**Enable data collection rule for Security Logs**\nSecurity Events logs are collected only from **Windows** agents.\n1. Add Exchange Servers on *Resources* tab.\n2. Select Security log level\n\n> **Common level** is the minimum required. 
Please select 'Common' or 'All Security Events' on DCR definition."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 0}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""title"": ""Application and System Event log collection"", ""instructionSteps"": [{""title"": ""Enable data collection rule"", ""description"": ""> Application and System Events logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template (Prefered method)"", ""description"": ""Use this method for automated deployment of the DCR.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCR, Type Event log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Basic' option.\n6. 
For Application, select 'Critical', 'Error' and 'Warning'. For System, select Critical/Error/Warning/Information. \n7. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Log Analytics will be deprecated"", ""description"": ""Azure Log Analytics will be deprecated, to collect data from non-Azure VMs, Azure Arc is recommended. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""name"": ""Detailed documentation"", ""description"": "">**NOTE:** Detailed documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-Opt2ExchangeServersEventLogs.json","true" -"SecurityEvent","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-Opt34DomainControllersSecurityEventLogs","Microsoft"," Microsoft Active-Directory Domain Controllers Security Event Logs","[Option 3 & 4] - Using Azure Monitor Agent - You can stream a part or all Domain Controllers Security Event logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to create custom alerts, and improve investigation.","[{""description"": "">**NOTE:** This solution is based on options. This allows you to choose which data will be ingested as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each option is independent from the others. To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions)\n\n>This Data Connector is the **option 3 and 4** of the wiki.""}, {""title"": ""1. 
Download and install the agents needed to collect logs for Microsoft Sentinel"", ""description"": ""Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Deploy Monitor Agents"", ""description"": ""This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers\n**Deploy the Azure Arc Agent**\n> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Security logs of Domain Controllers"", ""description"": ""Select how to stream Security logs of Domain Controllers. If you want to implement Option 3, you just need to select DC on same site as Exchange Servers. If you want to implement Option 4, you can select all DCs of your forest."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""[Option 3] List only Domain Controllers on the same site as Exchange Servers for next step"", ""description"": ""**This limits the quantity of data injested but some incident can't be detected.**""}, {""title"": ""[Option 4] List all Domain Controllers of your Active-Directory Forest for next step"", ""description"": ""**This allows collecting all security events**""}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""title"": ""Security Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - Security Event logs"", ""description"": ""**Enable data collection rule for Security Logs**\nSecurity Events logs are collected only from **Windows** agents.\n1. Add chosen DCs on *Resources* tab.\n2. Select Security log level\n\n> **Common level** is the minimum required. 
Please select 'Common' or 'All Security Events' on DCR definition."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 0}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""Azure Log Analytics will be deprecated, to collect data from non-Azure VMs, Azure Arc is recommended. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""name"": ""Detailled documentation"", ""description"": "">**NOTE:** Detailled documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-Opt34DomainControllersSecurityEventLogs.json","true" -"W3CIISLog","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-Opt5ExchangeIISLogs","Microsoft","IIS Logs of Microsoft Exchange Servers","[Option 5] - Using Azure Monitor Agent - You can stream all IIS Logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to create custom alerts, and improve investigation.","[{""description"": "">**NOTE:** This solution is based on options. This allows you to choose which data will be ingest as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each options are independant for one from the other. To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions)\n\n>This Data Connector is the **option 5** of the wiki.""}, {""title"": ""1. 
Download and install the agents needed to collect logs for Microsoft Sentinel"", ""description"": ""Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Deploy Monitor Agents"", ""description"": ""This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers\n**Deploy the Azure Arc Agent**\n> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""[Option 5] IIS logs of Exchange Servers"", ""description"": ""Select how to stream IIS logs of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Enable data collection rule"", ""description"": ""> IIS logs are collected only from **Windows** agents."", ""instructions"": [{""type"": ""AdminAuditEvents""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template (Preferred Method)"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule"", ""description"": ""1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption5-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create DCR, Type IIS log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. Select the created DCE. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'IIS logs' (Do not enter a path if IIS Logs path is configured by default). Click on 'Add data source'\n6. 
'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""Azure Log Analytics will be deprecated, to collect data from non-Azure VMs, Azure Arc is recommended. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""name"": ""Detailled documentation"", ""description"": "">**NOTE:** Detailled documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-Opt5ExchangeIISLogs.json","true" -"MessageTrackingLog_CL","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-Opt6ExchangeMessageTrackingLogs","Microsoft","Microsoft Exchange Message Tracking Logs","[Option 6] - Using Azure Monitor Agent - You can stream all Exchange Message Tracking from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. Those logs can be used to track the flow of messages in your Exchange environment. This data connector is based on the option 6 of the [Microsoft Exchange Security wiki](https://aka.ms/ESI_DataConnectorOptions).","[{""description"": "">**NOTE:** This solution is based on options. This allows you to choose which data will be ingest as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each options are independant for one from the other. 
To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions)\n\n>This Data Connector is the **option 6** of the wiki.""}, {""title"": ""1. Download and install the agents needed to collect logs for Microsoft Sentinel"", ""description"": ""Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Deploy Monitor Agents"", ""description"": ""This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers\n**Deploy the Azure Arc Agent**\n> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Message Tracking of Exchange Servers"", ""description"": ""Select how to stream Message Tracking of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Message Tracking are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. 
Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule and Custom Table"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption6-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Create Custom Table - Explanation"", ""description"": ""The Custom Table can't be created using the Azure Portal. You need to use an ARM template, a PowerShell Script or another method [described here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/create-custom-table?tabs=azure-powershell-1%2Cazure-portal-2%2Cazure-portal-3#create-a-custom-table).""}, {""title"": ""Create Custom Table using an ARM Template"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-MessageTrackingCustomTable)\n2. Select the preferred **Subscription**, **Resource Group**, **Location** and **Analytic Workspace Name**. \n3. Click **Create** to deploy.""}, {""title"": ""Create Custom Table using PowerShell in Cloud Shell"", ""description"": ""1. From the Azure Portal, open a Cloud Shell.\n2. 
Copy and paste and Execute the following script in the Cloud Shell to create the table.\n\t\t$tableParams = @'\n\t\t{\n\t\t\t\""properties\"": {\n\t\t\t\t\""schema\"": {\n\t\t\t\t\t \""name\"": \""MessageTrackingLog_CL\"",\n\t\t\t\t\t \""columns\"": [\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""directionality\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""reference\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""source\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""TimeGenerated\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""datetime\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""clientHostname\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""clientIP\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""connectorId\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""customData\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""eventId\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""internalMessageId\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""logId\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""messageId\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""messageInfo\"",\n\t\t\t\t\t\t\t\t\t\""type\"": 
\""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""messageSubject\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""networkMessageId\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""originalClientIp\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""originalServerIp\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""recipientAddress\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""recipientCount\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""recipientStatus\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""relatedRecipientAddress\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""returnPath\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""senderAddress\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""senderHostname\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""serverIp\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""sourceContext\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""schemaVersion\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": 
\""messageTrackingTenantId\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""totalBytes\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""transportTrafficType\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""FilePath\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t]\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\t'@\n3. Copy, Replace, Paste and execute the following parameters with your own values:\n\t\t$SubscriptionID = 'YourGUID'\n\t\t$ResourceGroupName = 'YourResourceGroupName'\n\t\t$WorkspaceName = 'YourWorkspaceName'\n4. Execute the Following Cmdlet to create the table:\n\t\tInvoke-AzRestMethod -Path \""/subscriptions/$SubscriptionID/resourcegroups/$ResourceGroupName/providers/microsoft.operationalinsights/workspaces/$WorkspaceName/tables/MessageTrackingLog_CL?api-version=2021-12-01-preview\"" -Method PUT -payload $tableParams""}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE, like ESI-ExchangeServers. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create a DCR, Type Custom log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click on 'Create' button.\n3. 
On 'Basics' tab, fill the Rule name like **DCR-Option6-MessageTrackingLogs**, select the 'Data Collection Endpoint' with the previously created endpoint and fill other parameters.\n4. In the **Resources** tab, add your Exchange Servers.\n5. In **Collect and Deliver**, add a Data Source type 'Custom Text logs' and enter 'C:\\Program Files\\Microsoft\\Exchange Server\\V15\\TransportRoles\\Logs\\MessageTracking\\*.log' in file pattern, 'MessageTrackingLog_CL' in Table Name.\n6.in Transform field, enter the following KQL request :\n\t\tsource | extend d = split(RawData,',') | extend TimeGenerated =todatetime(d[0]) ,clientIP =tostring(d[1]) ,clientHostname =tostring(d[2]) ,serverIp=tostring(d[3]) ,senderHostname=tostring(d[4]) ,sourceContext=tostring(d[5]) ,connectorId =tostring(d[6]) ,source=tostring(d[7]) ,eventId =tostring(d[8]) ,internalMessageId =tostring(d[9]) ,messageId =tostring(d[10]) ,networkMessageId =tostring(d[11]) ,recipientAddress=tostring(d[12]) ,recipientStatus=tostring(d[13]) ,totalBytes=tostring(d[14]) ,recipientCount=tostring(d[15]) ,relatedRecipientAddress=tostring(d[16]) ,reference=tostring(d[17]) ,messageSubject =tostring(d[18]) ,senderAddress=tostring(d[19]) ,returnPath=tostring(d[20]) ,messageInfo =tostring(d[21]) ,directionality=tostring(d[22]) ,messageTrackingTenantId =tostring(d[23]) ,originalClientIp =tostring(d[24]) ,originalServerIp =tostring(d[25]) ,customData=tostring(d[26]) ,transportTrafficType =tostring(d[27]) ,logId =tostring(d[28]) ,schemaVersion=tostring(d[29]) | project-away d,RawData\n and click on 'Destination'.\n6. In 'Destination', add a destination and select the Workspace where you have previously created the Custom Table \n7. Click on 'Add data source'.\n8. 
Fill other required parameters and tags and create the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Log Analytics will be deprecated"", ""description"": ""Azure Log Analytics will be deprecated, to collect data from non-Azure VMs, Azure Arc is recommended. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""name"": ""Detailled documentation"", ""description"": "">**NOTE:** Detailled documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-Opt6ExchangeMessageTrackingLogs.json","true" -"ExchangeHttpProxy_CL","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-Opt7ExchangeHTTPProxyLogs","Microsoft","Microsoft Exchange HTTP Proxy Logs","[Option 7] - Using Azure Monitor Agent - You can stream HTTP Proxy logs and Security Event logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you create custom alerts, and improve investigation. [Learn more](https://aka.ms/ESI_DataConnectorOptions)","[{""description"": "">**NOTE:** This solution is based on options. This allows you to choose which data will be ingest as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each options are independant for one from the other. To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions)\n\n>This Data Connector is the **option 7** of the wiki.""}, {""title"": ""1. 
Download and install the agents needed to collect logs for Microsoft Sentinel"", ""description"": ""Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Deploy Monitor Agents"", ""description"": ""This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers\n**Deploy the Azure Arc Agent**\n> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. [Option 7] HTTP Proxy of Exchange Servers"", ""description"": ""Select how to stream HTTP Proxy of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Message Tracking are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template (Preferred Method)"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n4. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule"", ""description"": ""1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption7-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Create Custom Table - Explanation"", ""description"": ""The Custom Table can't be created using the Azure Portal. You need to use an ARM template, a PowerShell Script or another method [described here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/create-custom-table?tabs=azure-powershell-1%2Cazure-portal-2%2Cazure-portal-3#create-a-custom-table).""}, {""title"": ""Create Custom Table using an ARM Template"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-HTTPProxyCustomTable)\n2. Select the preferred **Subscription**, **Resource Group**, **Location** and **Analytic Workspace Name**. \n3. Click **Create** to deploy.""}, {""title"": ""Create Custom Table using PowerShell in Cloud Shell"", ""description"": ""1. From the Azure Portal, open a Cloud Shell.\n2. 
Copy and paste and Execute the following script in the Cloud Shell to create the table.\n\t\t$tableParams = @'\n\t\t{\n\t\t\t\""properties\"": {\n\t\t\t\t \""schema\"": {\n\t\t\t\t\t\t\""name\"": \""ExchangeHttpProxy_CL\"",\n\t\t\t\t\t\t\""columns\"": [\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""AccountForestLatencyBreakup\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ActivityContextLifeTime\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ADLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""AnchorMailbox\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""AuthenticatedUser\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""AuthenticationType\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""AuthModulePerfContext\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""BackEndCookie\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""BackEndGenericInfo\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""BackendProcessingLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""BackendReqInitLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""BackendReqStreamLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": 
\""BackendRespInitLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""BackendRespStreamLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""BackEndStatus\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""BuildVersion\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""CalculateTargetBackEndLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ClientIpAddress\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ClientReqStreamLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ClientRequestId\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ClientRespStreamLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""CoreLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""DatabaseGuid\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""EdgeTraceId\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ErrorCode\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""GenericErrors\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""GenericInfo\"",\n\t\t\t\t\t\t\t\t\t\""type\"": 
\""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""GlsLatencyBreakup\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""HandlerCompletionLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""HandlerToModuleSwitchingLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""HttpPipelineLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""HttpProxyOverhead\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""HttpStatus\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""IsAuthenticated\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""KerberosAuthHeaderLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""MajorVersion\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""Method\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""MinorVersion\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ModuleToHandlerSwitchingLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""Organization\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""PartitionEndpointLookupLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": 
\""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""Protocol\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ProtocolAction\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ProxyAction\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ProxyTime\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""RequestBytes\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""RequestHandlerLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""RequestId\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ResourceForestLatencyBreakup\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ResponseBytes\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""RevisionVersion\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""RouteRefresherLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""RoutingHint\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""RoutingLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""RoutingStatus\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": 
\""RoutingType\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ServerHostName\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ServerLocatorHost\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ServerLocatorLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""SharedCacheLatencyBreakup\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""TargetOutstandingRequests\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""TargetServer\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""TargetServerVersion\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""TotalAccountForestLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""TotalGlsLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""TotalRequestTime\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""TotalResourceForestLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""TotalSharedCacheLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""UrlHost\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": 
\""UrlQuery\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""UrlStem\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""UserADObjectGuid\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""UserAgent\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""TimeGenerated\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""datetime\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""FilePath\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t]\n\t\t\t\t }\n\t\t\t }\n\t\t }\n\t\t '@\n3. Copy, Replace, Paste and execute the following parameters with your own values:\n\t\t$SubscriptionID = 'YourGUID'\n\t\t$ResourceGroupName = 'YourResourceGroupName'\n\t\t$WorkspaceName = 'YourWorkspaceName'\n4. Execute the Following Cmdlet to create the table:\n\t\tInvoke-AzRestMethod -Path \""/subscriptions/$SubscriptionID/resourcegroups/$ResourceGroupName/providers/microsoft.operationalinsights/workspaces/$WorkspaceName/tables/ExchangeHttpProxy_CL?api-version=2021-12-01-preview\"" -Method PUT -payload $tableParams""}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create a DCR, Type Custom log"", ""description"": ""1. 
From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click on 'Create' button.\n3. On 'Basics' tab, fill the Rule name like **DCR-Option7-HTTPProxyLogs**, select the 'Data Collection Endpoint' with the previously created endpoint and fill other parameters.\n4. In the **Resources** tab, add your Exchange Servers.\n5. In **Collect and Deliver**, add a Data Source type 'Custom Text logs' and enter the following file pattern : \n\t\t'C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Autodiscover\\*.log','C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Eas\\*.log','C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Ecp\\*.log','C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Ews\\*.log','C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Mapi\\*.log','C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Oab\\*.log','C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Owa\\*.log','C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\OwaCalendar\\*.log','C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\PowerShell\\*.log','C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\RpcHttp\\*.log'\n6. Put 'ExchangeHttpProxy_CL' in Table Name.\n7. 
in Transform field, enter the following KQL request :\n\t\tsource | extend d = split(RawData,',') | extend DateTime=todatetime(d[0]),RequestId=tostring(d[1]) ,MajorVersion=tostring(d[2]) ,MinorVersion=tostring(d[3]) ,BuildVersion=tostring(d[4]) ,RevisionVersion=tostring(d[5]) ,ClientRequestId=tostring(d[6]) ,Protocol=tostring(d[7]) ,UrlHost=tostring(d[8]) ,UrlStem=tostring(d[9]) ,ProtocolAction=tostring(d[10]) ,AuthenticationType=tostring(d[11]) ,IsAuthenticated=tostring(d[12]) ,AuthenticatedUser=tostring(d[13]) ,Organization=tostring(d[14]) ,AnchorMailbox=tostring(d[15]) ,UserAgent=tostring(d[16]) ,ClientIpAddress=tostring(d[17]) ,ServerHostName=tostring(d[18]) ,HttpStatus=tostring(d[19]) ,BackEndStatus=tostring(d[20]) ,ErrorCode=tostring(d[21]) ,Method=tostring(d[22]) ,ProxyAction=tostring(d[23]) ,TargetServer=tostring(d[24]) ,TargetServerVersion=tostring(d[25]) ,RoutingType=tostring(d[26]) ,RoutingHint=tostring(d[27]) ,BackEndCookie=tostring(d[28]) ,ServerLocatorHost=tostring(d[29]) ,ServerLocatorLatency=tostring(d[30]) ,RequestBytes=tostring(d[31]) ,ResponseBytes=tostring(d[32]) ,TargetOutstandingRequests=tostring(d[33]) ,AuthModulePerfContext=tostring(d[34]) ,HttpPipelineLatency=tostring(d[35]) ,CalculateTargetBackEndLatency=tostring(d[36]) ,GlsLatencyBreakup=tostring(d[37]) ,TotalGlsLatency=tostring(d[38]) ,AccountForestLatencyBreakup=tostring(d[39]) ,TotalAccountForestLatency=tostring(d[40]) ,ResourceForestLatencyBreakup=tostring(d[41]) ,TotalResourceForestLatency=tostring(d[42]) ,ADLatency=tostring(d[43]) ,SharedCacheLatencyBreakup=tostring(d[44]) ,TotalSharedCacheLatency=tostring(d[45]) ,ActivityContextLifeTime=tostring(d[46]) ,ModuleToHandlerSwitchingLatency=tostring(d[47]) ,ClientReqStreamLatency=tostring(d[48]) ,BackendReqInitLatency=tostring(d[49]) ,BackendReqStreamLatency=tostring(d[50]) ,BackendProcessingLatency=tostring(d[51]) ,BackendRespInitLatency=tostring(d[52]) ,BackendRespStreamLatency=tostring(d[53]) ,ClientRespStreamLatency=tostring(d[54]) 
,KerberosAuthHeaderLatency=tostring(d[55]) ,HandlerCompletionLatency=tostring(d[56]) ,RequestHandlerLatency=tostring(d[57]) ,HandlerToModuleSwitchingLatency=tostring(d[58]) ,ProxyTime=tostring(d[59]) ,CoreLatency=tostring(d[60]) ,RoutingLatency=tostring(d[61]) ,HttpProxyOverhead=tostring(d[62]) ,TotalRequestTime=tostring(d[63]) ,RouteRefresherLatency=tostring(d[64]) ,UrlQuery=tostring(d[65]) ,BackEndGenericInfo=tostring(d[66]) ,GenericInfo=tostring(d[67]) ,GenericErrors=tostring(d[68]) ,EdgeTraceId=tostring(d[69]) ,DatabaseGuid=tostring(d[70]) ,UserADObjectGuid=tostring(d[71]) ,PartitionEndpointLookupLatency=tostring(d[72]) ,RoutingStatus=tostring(d[73]) | extend TimeGenerated = DateTime | project-away d,RawData,DateTime\n and click on 'Destination'.\n8. In 'Destination', add a destination and select the Workspace where you have previously created the Custom Table \n9. Click on 'Add data source'.\n10. Fill other required parameters and tags and create the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Log Analytics will be deprecated"", ""description"": ""Azure Log Analytics will be deprecated, to collect data from non-Azure VMs, Azure Arc is recommended. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""name"": ""Detailed documentation"", ""description"": "">**NOTE:** Detailed documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-Opt7ExchangeHTTPProxyLogs.json","true"
"">**NOTE - UPDATE**"", ""instructions"": [{""parameters"": {""text"": ""

NOTE - UPDATE:

We recommend updating the Collector to Version 7.6.0.0 or higher.
The Collector Script Update procedure could be found here : ESI Online Collector Update"", ""visible"": true, ""inline"": false}, ""type"": ""InfoMessage""}]}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. Follow the steps for each Parser to create the Kusto Functions alias : [**ExchangeConfiguration**](https://aka.ms/sentinel-ESI-ExchangeConfiguration-Online-parser) and [**ExchangeEnvironmentList**](https://aka.ms/sentinel-ESI-ExchangeEnvironmentList-Online-parser) \n\n**STEP 1 - Parsers deployment**"", ""instructions"": [{""parameters"": {""title"": ""Parser deployment (When using Microsoft Exchange Security Solution, Parsers are automatically deployed)"", ""instructionSteps"": [{""title"": ""1. Download the Parser files"", ""description"": ""The latest version of the 2 files [**ExchangeConfiguration.yaml**](https://aka.ms/sentinel-ESI-ExchangeConfiguration-Online-parser) and [**ExchangeEnvironmentList.yaml**](https://aka.ms/sentinel-ESI-ExchangeEnvironmentList-Online-parser)""}, {""title"": ""2. Create Parser **ExchangeConfiguration** function"", ""description"": ""In 'Logs' explorer of your Microsoft Sentinel's log analytics, copy the content of the file to Log explorer""}, {""title"": ""3. Save Parser **ExchangeConfiguration** function"", ""description"": ""Click on save button.\n Define the parameters as asked on the header of the parser file.\nClick save again.""}, {""title"": ""4. Reproduce the same steps for Parser **ExchangeEnvironmentList**"", ""description"": ""Reproduce the step 2 and 3 with the content of 'ExchangeEnvironmentList.yaml' file""}]}, ""type"": ""InstructionStepsGroup""}]}, {""description"": "">**NOTE:** This connector uses Azure Automation to connect to 'Exchange Online' to pull its Security analysis into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Automation pricing page](https://azure.microsoft.com/pricing/details/automation/) for details.""}, {""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Automation**\n\n>**IMPORTANT:** Before deploying the 'ESI Exchange Online Security Configuration' connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Exchange Online tenant name (contoso.onmicrosoft.com), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""**Option 1 - Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the 'ESI Exchange Online Security Configuration' connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-ExchangeCollector-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **Tenant Name**, 'and/or Other required fields'. \n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""description"": ""**Option 2 - Manual Deployment of Azure Automation**\n\n Use the following step-by-step instructions to deploy the 'ESI Exchange Online Security Configuration' connector manually with Azure Automation."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create the Azure Automation Account"", ""description"": ""1. From the Azure Portal, navigate to [Azure Automation Account](https://portal.azure.com/#view/HubsExtension/BrowseResource/resourceType/Microsoft.Automation%2FAutomationAccounts).\n2. 
Click **+ Add** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the Azure Automation. \n4. In the **Advanced** and **Networking** and **Tags** Tabs, leave fields as default if you don't need to customize them.\n5. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Add Exchange Online Management Module, Microsoft Graph (Authentication, User and Group) Modules"", ""description"": ""1. On the Automation Account page, select **Modules**.\n2. Click on **Browse gallery** and search the **ExchangeOnlineManagement** module.\n3. Select it and click on **Select**.\n4. Choose Version **5.1** on Runtime version field and click on Import button.\nRepeat the step for the following modules : 'Microsoft.Graph.Authentication', 'Microsoft.Graph.Users' and 'Microsoft.Graph.Groups'. **Attention, you need to wait for Microsoft.Graph.Authentication installation before processing next modules**""}, {""title"": ""C. Download the Runbook Content"", ""description"": ""1. Download the latest version of ESI Collector. The latest version can be found here : https://aka.ms/ESI-ExchangeCollector-Script\n2. Unzip the file to find the JSON file and the PS1 file for next step.\n""}, {""title"": ""D. Create Runbook"", ""description"": ""1. On the Automation Account page, select the **Runbooks** button.\n2. Click on **Create a runbook** and name it like 'ESI-Collector' with a runbook type **PowerShell**, Runtime Version **5.1** and click 'Create'.\n2. Import the content of the previous step's PS1 file in the Runbook window.\n3. Click on **Publish**""}, {""title"": ""E. Create GlobalConfiguration Variable"", ""description"": ""1. On the Automation Account page, select the **Variables** button.\n2. Click on **Add a Variable** and name it exactly 'GlobalConfiguration' with a type **String**.\n2. On 'Value' field, copy the content of the previous step's JSON file.\n3. 
Inside the content, replace the values of **WorkspaceID** and **WorkspaceKey**.\n4. Click on 'Create' button.""}, {""title"": ""F. Create TenantName Variable"", ""description"": ""1. On the Automation Account page, select the **Variables** button.\n2. Click on **Add a Variable** and name it exactly 'TenantName' with a type **String**.\n3. On 'Value' field, write the tenant name of your Exchange Online.\n4. Click on 'Create' button.""}, {""title"": ""G. Create LastDateTracking Variable"", ""description"": ""1. On the Automation Account page, select the **Variables** button.\n2. Click on **Add a Variable** and name it exactly 'LastDateTracking' with a type **String**.\n3. On 'Value' field, write 'Never'.\n4. Click on 'Create' button.""}, {""title"": ""H. Create a Runbook Schedule"", ""description"": ""1. On the Automation Account page, select the **Runbook** button and click on your created runbook.\n2. Click on **Schedules** and **Add a schedule** button.\n3. Click on **Schedule**, **Add a Schedule** and name it. Select **Recurring** value with a recurrence of every 1 day, click 'Create'.\n4. Click on 'Configure parameters and run settings'. Leave all empty and click on **OK** and **OK** again.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""description"": ""**STEP 3 - Assign Microsoft Graph Permission and Exchange Online Permission to Managed Identity Account** \n\nTo be able to collect Exchange Online information and to be able to retrieve User information and memberlist of admin groups, the automation account needs multiple permissions."", ""instructions"": [{""parameters"": {""title"": ""Assign Permissions by Script"", ""instructionSteps"": [{""title"": ""A. Download Permission Script"", ""description"": ""[Permission Update script](https://aka.ms/ESI-ExchangeCollector-Permissions)""}, {""title"": ""B. Retrieve the Azure Automation Managed Identity GUID and insert it in the downloaded script"", ""description"": ""1. 
Go to your Automation Account, in the **Identity** Section. You can find the Guid of your Managed Identity.\n2. Replace the GUID in $MI_ID = \""XXXXXXXXXXX\"" with the GUID of your Managed Identity.""}, {""title"": ""C. Launch the script with a **Global-Administrator** account"", ""description"": ""**Attention this script requires MSGraph Modules and Admin Consent to access to your tenant with Microsoft Graph**.\n\tThe script will add 3 permissions to the Managed identity:\n\t1. Exchange Online ManageAsApp permission\n\t2. User.Read.All on Microsoft Graph API\n\t3. Group.Read.All on Microsoft Graph API""}, {""title"": ""D. Exchange Online Role Assignment"", ""description"": ""1. As a **Global Administrator**, go to **Roles and Administrators**.\n2. Select **Global Reader** role or **Security Reader** and click to 'Add assignments'.\n3. Click on 'No member selected' and search your Managed Identity account Name beginning by **the name of your automation account** like 'ESI-Collector'. Select it and click on 'Select'.\n4. Click **Next** and validate the assignment by clicking **Assign**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""microsoft.automation/automationaccounts permissions"", ""description"": ""Read and write permissions to create an Azure Automation with a Runbook is required. [See the documentation to learn more about Automation Account](https://learn.microsoft.com/en-us/azure/automation/overview).""}, {""name"": ""Microsoft.Graph permissions"", ""description"": ""Groups.Read, Users.Read and Auditing.Read permissions are required to retrieve user/group information linked to Exchange Online assignments. 
[See the documentation to learn more](https://aka.ms/sentinel-ESI-OnlineCollectorPermissions).""}, {""name"": ""Exchange Online permissions"", ""description"": ""Exchange.ManageAsApp permission and **Global Reader** or **Security Reader** Role are needed to retrieve the Exchange Online Security Configuration.[See the documentation to learn more](https://aka.ms/sentinel-ESI-OnlineCollectorPermissions).""}, {""name"": ""(Optional) Log Storage permissions"", ""description"": ""Storage Blob Data Contributor to a storage account linked to the Automation Account Managed identity or an Application ID is mandatory to store logs.[See the documentation to learn more](https://aka.ms/sentinel-ESI-OnlineCollectorPermissions).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20Online/Data%20Connectors/ESI-ExchangeOnlineCollector.json","true" -"","Microsoft PowerBI","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20PowerBI","azuresentinel","azure-sentinel-solution-microsoftpowerbi","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"PowerBIActivity","Microsoft PowerBI","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20PowerBI","azuresentinel","azure-sentinel-solution-microsoftpowerbi","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OfficePowerBI","Microsoft","Microsoft PowerBI","Microsoft PowerBI is a collection of software services, apps, and connectors that work together to turn your unrelated sources of data into coherent, visually immersive, and interactive insights. Your data may be an Excel spreadsheet, a collection of cloud-based and on-premises hybrid data warehouses, or a data store of some other type. 
This connector lets you stream PowerBI audit logs into Microsoft Sentinel, allowing you to track user activities in your PowerBI environment. You can filter the audit data by date range, user, dashboard, report, dataset, and activity type.","[{""title"": ""Connect Microsoft PowerBI audit logs to Microsoft Sentinel"", ""description"": ""This connector uses the Office Management API to get your PowerBI audit logs. The logs will be stored and processed in your existing Microsoft Sentinel workspace. You can find the data in the **PowerBIActivity** table."", ""instructions"": [{""parameters"": {""connectorKind"": ""OfficePowerBI"", ""title"": ""Microsoft PowerBI"", ""enable"": true}, ""type"": ""SentinelResourceProvider""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""tenant"": [""GlobalAdmin"", ""SecurityAdmin""], ""customs"": [{""name"": ""License"", ""description"": ""Microsoft Power BI eligible license is required.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20PowerBI/Data%20Connectors/template_OfficePowerBI.json","true" -"","Microsoft Project","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Project","azuresentinel","azure-sentinel-solution-microsoftproject","2022-05-23","","","Microsoft","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"ProjectActivity","Microsoft Project","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Project","azuresentinel","azure-sentinel-solution-microsoftproject","2022-05-23","","","Microsoft","Microsoft","https://support.microsoft.com","","domains","Office365Project","Microsoft","Microsoft Project","Microsoft Project (MSP) is a project management 
software solution. Depending on your plan, Microsoft Project lets you plan projects, assign tasks, manage resources, create reports and more. This connector allows you to stream your Azure Project audit logs into Microsoft Sentinel in order to track your project activities.","[{""title"": ""Connect Microsoft Project audit logs to Microsoft Sentinel"", ""description"": ""This connector uses the Office Management API to get your Project audit logs. The logs will be stored and processed in your existing Microsoft Sentinel workspace. You can find the data in the **ProjectActivity** table."", ""instructions"": [{""parameters"": {""connectorKind"": ""Office365Project"", ""title"": ""Microsoft Project"", ""enable"": true}, ""type"": ""SentinelResourceProvider""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""tenant"": [""GlobalAdmin"", ""SecurityAdmin""], ""customs"": [{""name"": ""License"", ""description"": ""\""Microsoft Project eligible license is required.\""""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Project/Data%20Connectors/template_Office365Project.JSON","true" -"","Microsoft Purview","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Purview","azuresentinel","azure-sentinel-solution-azurepurview","2021-11-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","Microsoft Purview Information Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Purview%20Information%20Protection","azuresentinel","azure-sentinel-solution-mip","2023-01-06","","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"MicrosoftPurviewInformationProtection","Microsoft Purview Information Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Purview%20Information%20Protection","azuresentinel","azure-sentinel-solution-mip","2023-01-06","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftPurviewInformationProtection","Microsoft","Microsoft Purview Information Protection","Microsoft Purview Information Protection helps you discover, classify, protect, and govern sensitive information wherever it lives or travels. Using these capabilities enable you to know your data, identify items that are sensitive and gain visibility into how they are being used to better protect your data. Sensitivity labels are the foundational capability that provide protection actions, applying encryption, access restrictions and visual markings.
Integrate Microsoft Purview Information Protection logs with Microsoft Sentinel to view dashboards, create custom alerts and improve investigation. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2223811&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect Microsoft Purview Information Protection audit logs to Microsoft Sentinel"", ""instructions"": [{""parameters"": {""connectorKind"": ""MicrosoftPurviewInformationProtection"", ""title"": """", ""enable"": true}, ""type"": ""SentinelResourceProvider""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""tenant"": [""GlobalAdmin"", ""SecurityAdmin""], ""customs"": [{""name"": ""License"", ""description"": ""Enterprise Mobility + Security E5/A5 or Microsoft 365 E5/A5 or P2""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Purview%20Information%20Protection/Data%20Connectors/MicrosoftPurviewInformationProtection.json","true" -"","Microsoft Sysmon For Linux","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Sysmon%20For%20Linux","azuresentinel","azure-sentinel-solution-sysmonforlinux","2021-10-27","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"Syslog","Microsoft Sysmon For Linux","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Sysmon%20For%20Linux","azuresentinel","azure-sentinel-solution-sysmonforlinux","2021-10-27","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftSysmonForLinux","Microsoft","[Deprecated] Microsoft Sysmon For Linux","[Sysmon for 
Linux](https://github.com/Sysinternals/SysmonForLinux) provides detailed information about process creations, network connections and other system events.
[Sysmon for linux link:]. The Sysmon for Linux connector uses [Syslog](https://aka.ms/sysLogInfo) as its data ingestion method. This solution depends on ASIM to work as expected. [Deploy ASIM](https://aka.ms/DeployASIM) to get the full value from the solution.","[{""title"": """", ""description"": "">This data connector depends on ASIM parsers based on a Kusto Functions to work as expected. [Deploy the parsers](https://aka.ms/ASimSysmonForLinuxARM) \n\n The following functions will be deployed:\n\n - vimFileEventLinuxSysmonFileCreated, vimFileEventLinuxSysmonFileDeleted\n\n - vimProcessCreateLinuxSysmon, vimProcessTerminateLinuxSysmon\n\n - vimNetworkSessionLinuxSysmon \n\n[Read more](https://aka.ms/AboutASIM)"", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n2. 
Select **Apply below configuration to my machines** and select the facilities and severities.\n3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Sysmon%20For%20Linux/Data%20Connectors/SysmonForLinux.json","true" -"","Microsoft Windows SQL Server Database Audit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Windows%20SQL%20Server%20Database%20Audit","microsoftsentinelcommunity","azure-sentinel-solution-sqlserverdatabaseaudit","2022-11-29","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","","","","","","","false","","false" -"","MicrosoftDefenderForEndpoint","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MicrosoftDefenderForEndpoint","azuresentinel","azure-sentinel-solution-microsoftdefenderendpoint","2022-01-31","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"SecurityAlert","MicrosoftDefenderForEndpoint","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MicrosoftDefenderForEndpoint","azuresentinel","azure-sentinel-solution-microsoftdefenderendpoint","2022-01-31","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftDefenderAdvancedThreatProtection","Microsoft","Microsoft Defender for Endpoint","Microsoft Defender for Endpoint is a security platform designed to prevent, detect, investigate, and respond to advanced threats. The platform creates alerts when suspicious security events are seen in an organization. 
Fetch alerts generated in Microsoft Defender for Endpoint to Microsoft Sentinel so that you can effectively analyze security events. You can create rules, build dashboards and author playbooks for immediate response. For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2220128&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect Microsoft Defender for Endpoint alerts to Microsoft Sentinel"", ""description"": ""> Connecting Microsoft Defender for Endpoint will cause your data that is collected by Microsoft Defender for Endpoint service to be stored and processed in the location that you have configured your Microsoft Sentinel workspace."", ""instructions"": [{""parameters"": {}, ""type"": ""MicrosoftDefenderATP""}, {""parameters"": {""text"": ""Microsoft Defender for Endpoint Advanced Hunting raw logs are available as part of the Microsoft 365 Defender (Preview) connector"", ""visible"": true, ""inline"": true}, ""type"": ""InfoMessage""}]}]","{""tenant"": [""GlobalAdmin"", ""SecurityAdmin""], ""licenses"": [""Mdatp""], ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MicrosoftDefenderForEndpoint/Data%20Connectors/template_MicrosoftDefenderAdvancedThreatProtection.JSON","true" -"","MicrosoftPurviewInsiderRiskManagement","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MicrosoftPurviewInsiderRiskManagement","azuresentinel","azure-sentinel-solution-insiderriskmanagement","2021-10-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" 
-"SecurityAlert","MicrosoftPurviewInsiderRiskManagement","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MicrosoftPurviewInsiderRiskManagement","azuresentinel","azure-sentinel-solution-insiderriskmanagement","2021-10-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OfficeIRM","Microsoft","Microsoft 365 Insider Risk Management","Microsoft 365 Insider Risk Management is a compliance solution in Microsoft 365 that helps minimize internal risks by enabling you to detect, investigate, and act on malicious and inadvertent activities in your organization. Risk analysts in your organization can quickly take appropriate actions to make sure users are compliant with your organization's compliance standards.

Insider risk policies allow you to:

- define the types of risks you want to identify and detect in your organization.
- decide on what actions to take in response, including escalating cases to Microsoft Advanced eDiscovery if needed.

This solution produces alerts that can be seen by Office customers in the Insider Risk Management solution in Microsoft 365 Compliance Center.
[Learn More](https://aka.ms/OfficeIRMConnector) about Insider Risk Management.

These alerts can be imported into Microsoft Sentinel with this connector, allowing you to see, investigate, and respond to them in a broader organizational threat context. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2223721&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect Microsoft 365 Insider Risk Management alerts to Microsoft Sentinel"", ""description"": ""Connecting Microsoft 365 Insider Risk Management will cause your data that is collected by Microsoft 365 Insider Risk Management service to be stored and processed in the location that you have configured your Microsoft Sentinel workspace."", ""instructions"": [{""parameters"": {""connectorKind"": ""OfficeIRM"", ""title"": ""Microsoft 365 Insider Risk Management"", ""enable"": true}, ""type"": ""SentinelResourceProvider""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""tenant"": [""GlobalAdmin"", ""SecurityAdmin""], ""licenses"": [""OfficeIRM""], ""customs"": [{""description"": ""Enable export of Insider Risk Management alerts to the Office 365 Management Activity API to receive alerts through the connector. 
[Learn More](https://aka.ms/OfficeIRMRequirement)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MicrosoftPurviewInsiderRiskManagement/Data%20Connectors/template_OfficeIRM.JSON","true" -"","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","","","","","","","false","","false" -"Awareness_Performance_Details_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastATAPI","Mimecast","Mimecast Awareness Training","The data connector for [Mimecast Awareness Training](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- Performance Details
- Safe Score Details
- User Data
- Watchlist Details
","[{""title"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""title"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. Client Secret\n5. Entra Object ID""}, {""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Mimecast Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Mimecast Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Mimecast Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 4 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available.""}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Mimecast Awareness Training Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastAT-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastAT-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the below information : \n\n\t a. Location - The location in which the data collection rules and data collection endpoints should be deployed\n\n\t b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace\n\n\t c. AzureClientID - Enter Azure Client ID that you have created during app registration\n\n\t d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret\n\n\t e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory\n\n\t f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App\n\n\t g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com)\n\n\t h. MimecastClientID - Enter Mimecast Client ID for authentication\n\n\t i. MimecastClientSecret - Enter Mimecast Client Secret for authentication\n\n\t j. MimecastAwarenessPerformanceDetailsTableName - Enter name of the table used to store Awareness Performance Details data. Default is 'Awareness_Performance_Details'\n\n\t k. MimecastAwarenessUserDataTableName - Enter name of the table used to store Awareness User Data data. 
Default is 'Awareness_User_Data'\n\n\t l. MimecastAwarenessWatchlistDetailsTableName - Enter name of the table used to store Awareness Watchlist Details data. Default is 'Awareness_Watchlist_Details'\n\n\t m. MimecastAwarenessSafeScoreDetailsTableName - Enter name of the table used to store Awareness SafeScore Details data. Default is 'Awareness_SafeScore_Details'\n\n\t n. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted\n\n\t o. Schedule - Please enter a valid Quartz cron-expression. (Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes\n\n\t p. LogLevel - Please add log level or log severity value. By default it is set to INFO\n\n\t q. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 February 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastAT/Mimecast_AT_FunctionApp.json","true" -"Awareness_SafeScore_Details_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastATAPI","Mimecast","Mimecast Awareness Training","The data connector for [Mimecast Awareness Training](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- Performance Details
- Safe Score Details
- User Data
- Watchlist Details
","[{""title"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""title"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. Client Secret\n5. Entra Object ID""}, {""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Mimecast Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Mimecast Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Mimecast Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 4 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available.""}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Mimecast Awareness Training Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastAT-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastAT-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the below information : \n\n\t a. Location - The location in which the data collection rules and data collection endpoints should be deployed\n\n\t b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace\n\n\t c. AzureClientID - Enter Azure Client ID that you have created during app registration\n\n\t d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret\n\n\t e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory\n\n\t f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App\n\n\t g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com)\n\n\t h. MimecastClientID - Enter Mimecast Client ID for authentication\n\n\t i. MimecastClientSecret - Enter Mimecast Client Secret for authentication\n\n\t j. MimecastAwarenessPerformanceDetailsTableName - Enter name of the table used to store Awareness Performance Details data. Default is 'Awareness_Performance_Details'\n\n\t k. MimecastAwarenessUserDataTableName - Enter name of the table used to store Awareness User Data data. 
Default is 'Awareness_User_Data'\n\n\t l. MimecastAwarenessWatchlistDetailsTableName - Enter name of the table used to store Awareness Watchlist Details data. Default is 'Awareness_Watchlist_Details'\n\n\t m. MimecastAwarenessSafeScoreDetailsTableName - Enter name of the table used to store Awareness SafeScore Details data. Default is 'Awareness_SafeScore_Details'\n\n\t n. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted\n\n\t o. Schedule - Please enter a valid Quartz cron-expression. (Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes\n\n\t p. LogLevel - Please add log level or log severity value. By default it is set to INFO\n\n\t q. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 February 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastAT/Mimecast_AT_FunctionApp.json","true" -"Awareness_User_Data_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastATAPI","Mimecast","Mimecast Awareness Training","The data connector for [Mimecast Awareness Training](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- Performance Details
- Safe Score Details
- User Data
- Watchlist Details
","[{""title"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""title"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. Client Secret\n5. Entra Object ID""}, {""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Mimecast Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Mimecast Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Mimecast Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 4 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available.""}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Mimecast Awareness Training Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastAT-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastAT-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the below information : \n\n\t a. Location - The location in which the data collection rules and data collection endpoints should be deployed\n\n\t b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace\n\n\t c. AzureClientID - Enter Azure Client ID that you have created during app registration\n\n\t d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret\n\n\t e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory\n\n\t f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App\n\n\t g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com)\n\n\t h. MimecastClientID - Enter Mimecast Client ID for authentication\n\n\t i. MimecastClientSecret - Enter Mimecast Client Secret for authentication\n\n\t j. MimecastAwarenessPerformanceDetailsTableName - Enter name of the table used to store Awareness Performance Details data. Default is 'Awareness_Performance_Details'\n\n\t k. MimecastAwarenessUserDataTableName - Enter name of the table used to store Awareness User Data data. 
Default is 'Awareness_User_Data'\n\n\t l. MimecastAwarenessWatchlistDetailsTableName - Enter name of the table used to store Awareness Watchlist Details data. Default is 'Awareness_Watchlist_Details'\n\n\t m. MimecastAwarenessSafeScoreDetailsTableName - Enter name of the table used to store Awareness SafeScore Details data. Default is 'Awareness_SafeScore_Details'\n\n\t n. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted\n\n\t o. Schedule - Please enter a valid Quartz cron-expression. (Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes\n\n\t p. LogLevel - Please add log level or log severity value. By default it is set to INFO\n\n\t q. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 February 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastAT/Mimecast_AT_FunctionApp.json","true" -"Awareness_Watchlist_Details_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastATAPI","Mimecast","Mimecast Awareness Training","The data connector for [Mimecast Awareness Training](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- Performance Details
- Safe Score Details
- User Data
- Watchlist Details
","[{""title"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""title"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. Client Secret\n5. Entra Object ID""}, {""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Mimecast Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Mimecast Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Mimecast Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 4 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available.""}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Mimecast Awareness Training Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastAT-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastAT-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the below information : \n\n\t a. Location - The location in which the data collection rules and data collection endpoints should be deployed\n\n\t b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace\n\n\t c. AzureClientID - Enter Azure Client ID that you have created during app registration\n\n\t d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret\n\n\t e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory\n\n\t f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App\n\n\t g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com)\n\n\t h. MimecastClientID - Enter Mimecast Client ID for authentication\n\n\t i. MimecastClientSecret - Enter Mimecast Client Secret for authentication\n\n\t j. MimecastAwarenessPerformanceDetailsTableName - Enter name of the table used to store Awareness Performance Details data. Default is 'Awareness_Performance_Details'\n\n\t k. MimecastAwarenessUserDataTableName - Enter name of the table used to store Awareness User Data data. 
Default is 'Awareness_User_Data'\n\n\t l. MimecastAwarenessWatchlistDetailsTableName - Enter name of the table used to store Awareness Watchlist Details data. Default is 'Awareness_Watchlist_Details'\n\n\t m. MimecastAwarenessSafeScoreDetailsTableName - Enter name of the table used to store Awareness SafeScore Details data. Default is 'Awareness_SafeScore_Details'\n\n\t n. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted\n\n\t o. Schedule - Please enter a valid Quartz cron-expression. (Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes\n\n\t p. LogLevel - Please add log level or log severity value. By default it is set to INFO\n\n\t q. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 February 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastAT/Mimecast_AT_FunctionApp.json","true" -"Audit_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastAuditAPI","Mimecast","Mimecast Audit","The data connector for [Mimecast Audit](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to audit and authentication events within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into user activity, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
Audit
","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Configuration:"", ""description"": ""**STEP 1 - Configuration steps for the Mimecast API**\n\nGo to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later)""}, {""title"": """", ""description"": ""**STEP 2 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available.""}, {""title"": """", ""description"": ""**STEP 3 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. 
You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Mimecast Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 4 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Mimecast Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Mimecast Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 5 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": ""Deploy the Mimecast Audit Data Connector:"", ""description"": ""Use this method for automated deployment of the Mimecast Audit Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastAuditAzureDeploy-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastAuditAzureDeploy-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the below information : \n\n\t a. Location - The location in which the data collection rules and data collection endpoints should be deployed\n\n\t b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace\n\n\t c. AzureClientID - Enter Azure Client ID that you have created during app registration\n\n\t d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret\n\n\t e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory\n\n\t f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App\n\n\t g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com)\n\n\t h. MimecastClientID - Enter Mimecast Client ID for authentication\n\n\t i. MimecastClientSecret - Enter Mimecast Client Secret for authentication\n\n\t j. MimecastAuditTableName - Enter name of the table used to store Audit data. Default is 'Audit'\n\n\t k. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted\n\n\t l. Schedule - Please enter a valid Quartz cron-expression. (Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes\n\n\t m. 
LogLevel - Please add log level or log severity value. By default it is set to INFO\n\n\t n. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 February 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastAudit/Mimecast_Audit_FunctionApp.json","true" -"Cloud_Integrated_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastCIAPI","Mimecast","Mimecast Cloud Integrated","The data connector for [Mimecast Cloud Integrated](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Cloud Integrated inspection technologies within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.","[{""title"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""title"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. Client Secret""}, {""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Configuration:"", ""description"": ""**STEP 1 - Configuration steps for the Mimecast API**\n\nGo to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later)""}, {""title"": """", ""description"": ""**STEP 2 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available.""}, {""title"": """", ""description"": ""**STEP 3 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Mimecast Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 4 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Mimecast Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Mimecast Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 5 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Mimecast Cloud Integrated Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastCI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastCI-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the below information : \n\n\t a. Location - The location in which the data collection rules and data collection endpoints should be deployed\n\n\t b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace\n\n\t c. AzureClientID - Enter Azure Client ID that you have created during app registration\n\n\t d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret\n\n\t e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory\n\n\t f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App\n\n\t g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com)\n\n\t h. MimecastClientID - Enter Mimecast Client ID for authentication\n\n\t i. MimecastClientSecret - Enter Mimecast Client Secret for authentication\n\n\t j. MimecastCITableName - Enter name of the table used to store Cloud Integrated data. Default is 'Cloud_Integrated'\n\n\t k. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted\n\n\t l. Schedule - Please enter a valid Quartz cron-expression. (Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes\n\n\t m. 
LogLevel - Please add log level or log severity value. By default it is set to INFO\n\n\t n. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 February 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastCloudIntegrated/Mimecast_Cloud_Integrated_FunctionApp.json","true" -"Seg_Cg_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastSEGAPI","Mimecast","Mimecast Secure Email Gateway","The data connector for [Mimecast Secure Email Gateway](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) allows easy log collection from the Secure Email Gateway to surface email insight and user activity within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities. Mimecast products and features required:
- Mimecast Cloud Gateway
- Mimecast Data Leak Prevention
","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Configuration:"", ""description"": ""**STEP 1 - Configuration steps for the Mimecast API**\n\nGo to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later)""}, {""title"": """", ""description"": ""****STEP 2 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available.""}, {""title"": """", ""description"": ""**STEP 3 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. 
You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Mimecast Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 4 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Mimecast Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Mimecast Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 5 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": ""Deploy the Mimecast Secure Email Gateway Data Connector:"", ""description"": ""Use this method for automated deployment of the Mimecast Secure Email Gateway Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastSEGAzureDeploy-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastSEGAzureDeploy-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the below information : \n\n\t a. Location - The location in which the data collection rules and data collection endpoints should be deployed\n\n\t b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace\n\n\t c. AzureClientID - Enter Azure Client ID that you have created during app registration\n\n\t d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret\n\n\t e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory\n\n\t f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App\n\n\t g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com)\n\n\t h. MimecastClientID - Enter Mimecast Client ID for authentication\n\n\t i. MimecastClientSecret - Enter Mimecast Client Secret for authentication\n\n\t j. MimecastCGTableName - Enter name of the table used to store CG data. Default is 'Seg_Cg'\n\n\t k. MimecastDLPTableName - Enter name of the table used to store DLP data. Default is 'Seg_Dlp'\n\n\t l. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted\n\n\t m. Schedule - Please enter a valid Quartz cron-expression. 
(Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes\n\n\t n. LogLevel - Please add log level or log severity value. By default it is set to INFO\n\n\t o. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 Febraury 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastSEG/Mimecast_SEG_FunctionApp.json","true" -"Seg_Dlp_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastSEGAPI","Mimecast","Mimecast Secure Email Gateway","The data connector for [Mimecast Secure Email Gateway](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) allows easy log collection from the Secure Email Gateway to surface email insight and user activity within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities. Mimecast products and features required:
- Mimecast Cloud Gateway
- Mimecast Data Leak Prevention
","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Configuration:"", ""description"": ""**STEP 1 - Configuration steps for the Mimecast API**\n\nGo to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later)""}, {""title"": """", ""description"": ""**STEP 2 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available.""}, {""title"": """", ""description"": ""**STEP 3 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. 
You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Mimecast Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 4 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Mimecast Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Mimecast Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 5 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": ""Deploy the Mimecast Secure Email Gateway Data Connector:"", ""description"": ""Use this method for automated deployment of the Mimecast Secure Email Gateway Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastSEGAzureDeploy-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastSEGAzureDeploy-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the below information : \n\n\t a. Location - The location in which the data collection rules and data collection endpoints should be deployed\n\n\t b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace\n\n\t c. AzureClientID - Enter Azure Client ID that you have created during app registration\n\n\t d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret\n\n\t e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory\n\n\t f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App\n\n\t g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com)\n\n\t h. MimecastClientID - Enter Mimecast Client ID for authentication\n\n\t i. MimecastClientSecret - Enter Mimecast Client Secret for authentication\n\n\t j. MimecastCGTableName - Enter name of the table used to store CG data. Default is 'Seg_Cg'\n\n\t k. MimecastDLPTableName - Enter name of the table used to store DLP data. Default is 'Seg_Dlp'\n\n\t l. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted\n\n\t m. Schedule - Please enter a valid Quartz cron-expression. 
(Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes\n\n\t n. LogLevel - Please add log level or log severity value. By default it is set to INFO\n\n\t o. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 February 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastSEG/Mimecast_SEG_FunctionApp.json","true" -"Ttp_Attachment_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastTTPAPI","Mimecast","Mimecast Targeted Threat Protection","The data connector for [Mimecast Targeted Threat Protection](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- URL Protect
- Impersonation Protect
- Attachment Protect
","[{""title"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""title"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. Client Secret""}, {""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Mimecast Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Mimecast Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Mimecast Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 4 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available.""}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Mimecast Targeted Threat Protection Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastTTPAzureDeploy-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastTTPAzureDeploy-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the below information : \n\n\t a. Location - The location in which the data collection rules and data collection endpoints should be deployed\n\n\t b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace\n\n\t c. AzureClientID - Enter Azure Client ID that you have created during app registration\n\n\t d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret\n\n\t e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory\n\n\t f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App\n\n\t g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com)\n\n\t h. MimecastClientID - Enter Mimecast Client ID for authentication\n\n\t i. MimecastClientSecret - Enter Mimecast Client Secret for authentication\n\n\t j. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted\n\n\t k. 
MimecastTTPAttachmentTableName - Enter name of the table used to store TTP Attachment data. Default is 'Ttp_Attachment'\n\n\t l. MimecastTTPImpersonationTableName - Enter name of the table used to store TTP Impersonation data. Default is 'Ttp_Impersonation'\n\n\t m. MimecastTTPUrlTableName - Enter name of the table used to store TTP URL data. Default is 'Ttp_Url'\n\n\t n. Schedule - Please enter a valid Quartz cron-expression. (Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes\n\n\t o. LogLevel - Please add log level or log severity value. By default it is set to INFO\n\n\t p. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 February 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastTTP/Mimecast_TTP_FunctionApp.json","true" -"Ttp_Impersonation_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastTTPAPI","Mimecast","Mimecast Targeted Threat Protection","The data connector for [Mimecast Targeted Threat Protection](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- URL Protect
- Impersonation Protect
- Attachment Protect
","[{""title"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""title"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. Client Secret""}, {""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Mimecast Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Mimecast Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Mimecast Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 4 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available.""}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Mimecast Targeted Threat Protection Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastTTPAzureDeploy-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastTTPAzureDeploy-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the below information : \n\n\t a. Location - The location in which the data collection rules and data collection endpoints should be deployed\n\n\t b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace\n\n\t c. AzureClientID - Enter Azure Client ID that you have created during app registration\n\n\t d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret\n\n\t e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory\n\n\t f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App\n\n\t g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com)\n\n\t h. MimecastClientID - Enter Mimecast Client ID for authentication\n\n\t i. MimecastClientSecret - Enter Mimecast Client Secret for authentication\n\n\t j. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted\n\n\t k. 
MimecastTTPAttachmentTableName - Enter name of the table used to store TTP Attachment data. Default is 'Ttp_Attachment'\n\n\t l. MimecastTTPImpersonationTableName - Enter name of the table used to store TTP Impersonation data. Default is 'Ttp_Impersonation'\n\n\t m. MimecastTTPUrlTableName - Enter name of the table used to store TTP URL data. Default is 'Ttp_Url'\n\n\t n. Schedule - Please enter a valid Quartz cron-expression. (Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes\n\n\t o. LogLevel - Please add log level or log severity value. By default it is set to INFO\n\n\t p. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 February 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastTTP/Mimecast_TTP_FunctionApp.json","true" -"Ttp_Url_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastTTPAPI","Mimecast","Mimecast Targeted Threat Protection","The data connector for [Mimecast Targeted Threat Protection](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- URL Protect
- Impersonation Protect
- Attachment Protect
","[{""title"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""title"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. Client Secret""}, {""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Mimecast Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Mimecast Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Mimecast Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 4 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available.""}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Mimecast Targeted Threat Protection Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastTTPAzureDeploy-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastTTPAzureDeploy-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the below information : \n\n\t a. Location - The location in which the data collection rules and data collection endpoints should be deployed\n\n\t b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace\n\n\t c. AzureClientID - Enter Azure Client ID that you have created during app registration\n\n\t d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret\n\n\t e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory\n\n\t f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App\n\n\t g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com)\n\n\t h. MimecastClientID - Enter Mimecast Client ID for authentication\n\n\t i. MimecastClientSecret - Enter Mimecast Client Secret for authentication\n\n\t j. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted\n\n\t k. 
MimecastTTPAttachmentTableName - Enter name of the table used to store TTP Attachment data. Default is 'Ttp_Attachment'\n\n\t l. MimecastTTPImpersonationTableName - Enter name of the table used to store TTP Impersonation data. Default is 'Ttp_Impersonation'\n\n\t m. MimecastTTPUrlTableName - Enter name of the table used to store TTP Url data. Default is 'Ttp_Url'\n\n\t n. Schedule - Please enter a valid Quartz cron-expression. (Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes\n\n\t o. LogLevel - Please add log level or log severity value. By default it is set to INFO\n\n\t p. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 February 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastTTP/Mimecast_TTP_FunctionApp.json","true" -"","MimecastAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastAudit","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecastaudit","2022-02-24","2022-02-24","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","","","","","","","false","","false" -"MimecastAudit_CL","MimecastAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastAudit","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecastaudit","2022-02-24","2022-02-24","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastAuditAPI","Mimecast","Mimecast Audit & Authentication","The data connector for [Mimecast Audit & Authentication](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to audit and authentication 
events within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into user activity, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
Audit & Authentication
","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Configuration:"", ""description"": ""**STEP 1 - Configuration steps for the Mimecast API**\n\nGo to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later)""}, {""title"": """", ""description"": ""**STEP 2 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Mimecast API authorization key(s) or Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Deploy the Mimecast Audit & Authentication Data Connector:"", ""description"": ""\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastAudit-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the following fields:\n - appName: Unique string that will be used as id for the app in Azure platform\n - objectId: Azure portal ---> Azure Active Directory ---> more info ---> Profile -----> Object ID\n - appInsightsLocation(default): westeurope\n - mimecastEmail: Email address of dedicated user for this integration\n - mimecastPassword: Password for dedicated user\n - mimecastAppId: Application Id from the Microsoft Sentinel app registered with Mimecast\n - mimecastAppKey: Application Key from the Microsoft Sentinel app registered with Mimecast\n - mimecastAccessKey: Access Key for the dedicated Mimecast user\n - mimecastSecretKey: Secret Key for dedicated Mimecast user\n - mimecastBaseURL: Regional Mimecast API Base URL\n - activeDirectoryAppId: Azure portal ---> App registrations ---> [your_app] ---> Application ID\n - activeDirectoryAppSecret: Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> [your_app_secret]\n - workspaceId: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Workspace ID (or you can copy workspaceId from above) \n - workspaceKey: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Primary Key (or you can copy workspaceKey from above) \n - AppInsightsWorkspaceResourceID : Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Properties ---> Resource ID \n\n >Note: If using Azure Key Vault secrets for any of the values above, use the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy. \n\n6. 
Go to ***Azure portal ---> Resource groups ---> [your_resource_group] ---> [appName](type: Storage account) ---> Storage Explorer ---> BLOB CONTAINERS ---> Audit checkpoints ---> Upload*** and create empty file on your machine named checkpoint.txt and select it for upload (this is done so that date_range for SIEM logs is stored in consistent state)\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Mimecast API credentials"", ""description"": ""You need to have the following pieces of information to configure the integration:\n- mimecastEmail: Email address of a dedicated Mimecast admin user\n- mimecastPassword: Password for the dedicated Mimecast admin user\n- mimecastAppId: API Application Id of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAppKey: API Application Key of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAccessKey: Access Key for the dedicated Mimecast admin user\n- mimecastSecretKey: Secret Key for the dedicated Mimecast admin user\n- mimecastBaseURL: Mimecast Regional API Base URL\n\n> The Mimecast Application Id, Application Key, along with the Access Key and Secret keys for the dedicated Mimecast admin user are obtainable via the Mimecast Administration Console: Administration | Services | API and Platform Integrations.\n\n> The Mimecast API Base URL for each region is documented here: https://integrations.mimecast.com/documentation/api-overview/global-base-urls/""}, {""name"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""name"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. 
Client Secret""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastAudit/Data%20Connectors/MimecastAudit_API_AzureFunctionApp.json","true" -"","MimecastSEG","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastSEG","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecastseg","2022-02-24","2022-02-24","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","","","","","","","false","","false" -"MimecastDLP_CL","MimecastSEG","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastSEG","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecastseg","2022-02-24","2022-02-24","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastSIEMAPI","Mimecast","Mimecast Secure Email Gateway","The data connector for [Mimecast Secure Email Gateway](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) allows easy log collection from the Secure Email Gateway to surface email insight and user activity within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities. Mimecast products and features required:
- Mimecast Secure Email Gateway
- Mimecast Data Leak Prevention
","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Configuration:"", ""description"": ""**STEP 1 - Configuration steps for the Mimecast API**\n\nGo to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later)""}, {""title"": """", ""description"": ""**STEP 2 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Mimecast API authorization key(s) or Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Deploy the Mimecast Secure Email Gateway Data Connector:"", ""description"": ""\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastSEG-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the following fields:\n - appName: Unique string that will be used as id for the app in Azure platform\n - objectId: Azure portal ---> Azure Active Directory ---> more info ---> Profile -----> Object ID\n - appInsightsLocation(default): westeurope\n - mimecastEmail: Email address of dedicated user for this integration\n - mimecastPassword: Password for dedicated user\n - mimecastAppId: Application Id from the Microsoft Sentinel app registered with Mimecast\n - mimecastAppKey: Application Key from the Microsoft Sentinel app registered with Mimecast\n - mimecastAccessKey: Access Key for the dedicated Mimecast user\n - mimecastSecretKey: Secret Key for dedicated Mimecast user\n - mimecastBaseURL: Regional Mimecast API Base URL\n - activeDirectoryAppId: Azure portal ---> App registrations ---> [your_app] ---> Application ID\n - activeDirectoryAppSecret: Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> [your_app_secret]\n - workspaceId: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Workspace ID (or you can copy workspaceId from above) \n - workspaceKey: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Primary Key (or you can copy workspaceKey from above) \n - AppInsightsWorkspaceResourceID : Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Properties ---> Resource ID \n\n >Note: If using Azure Key Vault secrets for any of the values above, use the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n\n6. 
Go to ***Azure portal ---> Resource groups ---> [your_resource_group] ---> [appName](type: Storage account) ---> Storage Explorer ---> BLOB CONTAINERS ---> SIEM checkpoints ---> Upload*** and create empty file on your machine named checkpoint.txt, dlp-checkpoint.txt and select it for upload (this is done so that date_range for SIEM logs is stored in consistent state)\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Mimecast API credentials"", ""description"": ""You need to have the following pieces of information to configure the integration:\n- mimecastEmail: Email address of a dedicated Mimecast admin user\n- mimecastPassword: Password for the dedicated Mimecast admin user\n- mimecastAppId: API Application Id of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAppKey: API Application Key of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAccessKey: Access Key for the dedicated Mimecast admin user\n- mimecastSecretKey: Secret Key for the dedicated Mimecast admin user\n- mimecastBaseURL: Mimecast Regional API Base URL\n\n> The Mimecast Application Id, Application Key, along with the Access Key and Secret keys for the dedicated Mimecast admin user are obtainable via the Mimecast Administration Console: Administration | Services | API and Platform Integrations.\n\n> The Mimecast API Base URL for each region is documented here: https://integrations.mimecast.com/documentation/api-overview/global-base-urls/""}, {""name"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""name"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. 
Client Secret""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastSEG/Data%20Connectors/MimecastSEG_API_AzureFunctionApp.json","true" -"MimecastSIEM_CL","MimecastSEG","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastSEG","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecastseg","2022-02-24","2022-02-24","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastSIEMAPI","Mimecast","Mimecast Secure Email Gateway","The data connector for [Mimecast Secure Email Gateway](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) allows easy log collection from the Secure Email Gateway to surface email insight and user activity within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities. Mimecast products and features required:
- Mimecast Secure Email Gateway
- Mimecast Data Leak Prevention
","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Configuration:"", ""description"": ""**STEP 1 - Configuration steps for the Mimecast API**\n\nGo to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later)""}, {""title"": """", ""description"": ""**STEP 2 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Mimecast API authorization key(s) or Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Deploy the Mimecast Secure Email Gateway Data Connector:"", ""description"": ""\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastSEG-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the following fields:\n - appName: Unique string that will be used as id for the app in Azure platform\n - objectId: Azure portal ---> Azure Active Directory ---> more info ---> Profile -----> Object ID\n - appInsightsLocation(default): westeurope\n - mimecastEmail: Email address of dedicated user for this integration\n - mimecastPassword: Password for dedicated user\n - mimecastAppId: Application Id from the Microsoft Sentinel app registered with Mimecast\n - mimecastAppKey: Application Key from the Microsoft Sentinel app registered with Mimecast\n - mimecastAccessKey: Access Key for the dedicated Mimecast user\n - mimecastSecretKey: Secret Key for dedicated Mimecast user\n - mimecastBaseURL: Regional Mimecast API Base URL\n - activeDirectoryAppId: Azure portal ---> App registrations ---> [your_app] ---> Application ID\n - activeDirectoryAppSecret: Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> [your_app_secret]\n - workspaceId: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Workspace ID (or you can copy workspaceId from above) \n - workspaceKey: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Primary Key (or you can copy workspaceKey from above) \n - AppInsightsWorkspaceResourceID : Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Properties ---> Resource ID \n\n >Note: If using Azure Key Vault secrets for any of the values above, use the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n\n6. 
Go to ***Azure portal ---> Resource groups ---> [your_resource_group] ---> [appName](type: Storage account) ---> Storage Explorer ---> BLOB CONTAINERS ---> SIEM checkpoints ---> Upload*** and create empty file on your machine named checkpoint.txt, dlp-checkpoint.txt and select it for upload (this is done so that date_range for SIEM logs is stored in consistent state)\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Mimecast API credentials"", ""description"": ""You need to have the following pieces of information to configure the integration:\n- mimecastEmail: Email address of a dedicated Mimecast admin user\n- mimecastPassword: Password for the dedicated Mimecast admin user\n- mimecastAppId: API Application Id of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAppKey: API Application Key of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAccessKey: Access Key for the dedicated Mimecast admin user\n- mimecastSecretKey: Secret Key for the dedicated Mimecast admin user\n- mimecastBaseURL: Mimecast Regional API Base URL\n\n> The Mimecast Application Id, Application Key, along with the Access Key and Secret keys for the dedicated Mimecast admin user are obtainable via the Mimecast Administration Console: Administration | Services | API and Platform Integrations.\n\n> The Mimecast API Base URL for each region is documented here: https://integrations.mimecast.com/documentation/api-overview/global-base-urls/""}, {""name"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""name"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. 
Client Secret""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastSEG/Data%20Connectors/MimecastSEG_API_AzureFunctionApp.json","true" -"","MimecastTIRegional","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTIRegional","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecasttiregional","2023-08-23","2023-09-11","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","","","","","","","false","","false" -"Event","MimecastTIRegional","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTIRegional","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecasttiregional","2023-08-23","2023-09-11","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastTIRegionalConnectorAzureFunctions","Mimecast","Mimecast Intelligence for Microsoft - Microsoft Sentinel","The data connector for Mimecast Intelligence for Microsoft provides regional threat intelligence curated from Mimecast’s email inspection technologies with pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times.
Mimecast products and features required:
- Mimecast Secure Email Gateway
- Mimecast Threat Intelligence
","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Configuration:"", ""description"": ""**STEP 1 - Configuration steps for the Mimecast API**\n\nGo to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later)""}, {""title"": """", ""description"": ""**STEP 2 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Mimecast API authorization key(s) or Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Enable Mimecast Intelligence for Microsoft - Microsoft Sentinel Connector:"", ""description"": ""\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastTIRegional-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the following fields:\n - appName: Unique string that will be used as id for the app in Azure platform\n - objectId: Azure portal ---> Azure Active Directory ---> more info ---> Profile -----> Object ID\n - appInsightsLocation(default): westeurope\n - mimecastEmail: Email address of dedicated user for this integraion\n - mimecastPassword: Password for dedicated user\n - mimecastAppId: Application Id from the Microsoft Sentinel app registered with Mimecast\n - mimecastAppKey: Application Key from the Microsoft Sentinel app registered with Mimecast\n - mimecastAccessKey: Access Key for the dedicated Mimecast user\n - mimecastSecretKey: Secret Key for dedicated Mimecast user\n - mimecastBaseURL: Regional Mimecast API Base URL\n - activeDirectoryAppId: Azure portal ---> App registrations ---> [your_app] ---> Application ID\n - activeDirectoryAppSecret: Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> [your_app_secret]\n - workspaceId: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Workspace ID (or you can copy workspaceId from above) \n - workspaceKey: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Primary Key (or you can copy workspaceKey from above) \n - AppInsightsWorkspaceResourceID : Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Properties ---> Resource ID \n\n >Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n\n6. 
Go to ***Azure portal ---> Resource groups ---> [your_resource_group] ---> [appName](type: Storage account) ---> Storage Explorer ---> BLOB CONTAINERS ---> TIR checkpoints ---> Upload*** and create empty file on your machine named checkpoint.txt and select it for upload (this is done so that date_range for TIR logs is stored in consistent state)\n""}, {""title"": ""Additional configuration:"", ""description"": "">Connect to a **Threat Intelligence Platforms** Data Connector. Follow instructions on the connector page and then click connect button.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Mimecast API credentials"", ""description"": ""You need to have the following pieces of information to configure the integration:\n- mimecastEmail: Email address of a dedicated Mimecast admin user\n- mimecastPassword: Password for the dedicated Mimecast admin user\n- mimecastAppId: API Application Id of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAppKey: API Application Key of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAccessKey: Access Key for the dedicated Mimecast admin user\n- mimecastSecretKey: Secret Key for the dedicated Mimecast admin user\n- mimecastBaseURL: Mimecast Regional API Base URL\n\n> The Mimecast Application Id, Application Key, along with the Access Key and Secret keys for the dedicated Mimecast admin user are obtainable via the Mimecast Administration Console: Administration | Services | API and Platform Integrations.\n\n> The Mimecast API Base URL for each region is documented here: https://integrations.mimecast.com/documentation/api-overview/global-base-urls/""}, {""name"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""name"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. 
Client Secret""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTIRegional/Data%20Connectors/MimecastTIRegional_API_AzureFunctionApp.json","true" -"ThreatIntelligenceIndicator","MimecastTIRegional","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTIRegional","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecasttiregional","2023-08-23","2023-09-11","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastTIRegionalConnectorAzureFunctions","Mimecast","Mimecast Intelligence for Microsoft - Microsoft Sentinel","The data connector for Mimecast Intelligence for Microsoft provides regional threat intelligence curated from Mimecast’s email inspection technologies with pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times.
Mimecast products and features required:
- Mimecast Secure Email Gateway
- Mimecast Threat Intelligence
","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Configuration:"", ""description"": ""**STEP 1 - Configuration steps for the Mimecast API**\n\nGo to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later)""}, {""title"": """", ""description"": ""**STEP 2 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Mimecast API authorization key(s) or Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Enable Mimecast Intelligence for Microsoft - Microsoft Sentinel Connector:"", ""description"": ""\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastTIRegional-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the following fields:\n - appName: Unique string that will be used as id for the app in Azure platform\n - objectId: Azure portal ---> Azure Active Directory ---> more info ---> Profile -----> Object ID\n - appInsightsLocation(default): westeurope\n - mimecastEmail: Email address of dedicated user for this integraion\n - mimecastPassword: Password for dedicated user\n - mimecastAppId: Application Id from the Microsoft Sentinel app registered with Mimecast\n - mimecastAppKey: Application Key from the Microsoft Sentinel app registered with Mimecast\n - mimecastAccessKey: Access Key for the dedicated Mimecast user\n - mimecastSecretKey: Secret Key for dedicated Mimecast user\n - mimecastBaseURL: Regional Mimecast API Base URL\n - activeDirectoryAppId: Azure portal ---> App registrations ---> [your_app] ---> Application ID\n - activeDirectoryAppSecret: Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> [your_app_secret]\n - workspaceId: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Workspace ID (or you can copy workspaceId from above) \n - workspaceKey: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Primary Key (or you can copy workspaceKey from above) \n - AppInsightsWorkspaceResourceID : Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Properties ---> Resource ID \n\n >Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n\n6. 
Go to ***Azure portal ---> Resource groups ---> [your_resource_group] ---> [appName](type: Storage account) ---> Storage Explorer ---> BLOB CONTAINERS ---> TIR checkpoints ---> Upload*** and create empty file on your machine named checkpoint.txt and select it for upload (this is done so that date_range for TIR logs is stored in consistent state)\n""}, {""title"": ""Additional configuration:"", ""description"": "">Connect to a **Threat Intelligence Platforms** Data Connector. Follow instructions on the connector page and then click connect button.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Mimecast API credentials"", ""description"": ""You need to have the following pieces of information to configure the integration:\n- mimecastEmail: Email address of a dedicated Mimecast admin user\n- mimecastPassword: Password for the dedicated Mimecast admin user\n- mimecastAppId: API Application Id of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAppKey: API Application Key of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAccessKey: Access Key for the dedicated Mimecast admin user\n- mimecastSecretKey: Secret Key for the dedicated Mimecast admin user\n- mimecastBaseURL: Mimecast Regional API Base URL\n\n> The Mimecast Application Id, Application Key, along with the Access Key and Secret keys for the dedicated Mimecast admin user are obtainable via the Mimecast Administration Console: Administration | Services | API and Platform Integrations.\n\n> The Mimecast API Base URL for each region is documented here: https://integrations.mimecast.com/documentation/api-overview/global-base-urls/""}, {""name"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""name"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. 
Client Secret""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTIRegional/Data%20Connectors/MimecastTIRegional_API_AzureFunctionApp.json","true" -"","MimecastTTP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTTP","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecastttp","2022-02-24","2022-02-24","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","","","","","","","false","","false" -"MimecastTTPAttachment_CL","MimecastTTP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTTP","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecastttp","2022-02-24","2022-02-24","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastTTPAPI","Mimecast","Mimecast Targeted Threat Protection","The data connector for [Mimecast Targeted Threat Protection](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- URL Protect
- Impersonation Protect
- Attachment Protect
","[{""title"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""title"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. Client Secret""}, {""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Configuration:"", ""description"": ""**STEP 1 - Configuration steps for the Mimecast API**\n\nGo to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later)""}, {""title"": """", ""description"": ""**STEP 2 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Mimecast API authorization key(s) or Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Deploy the Mimecast Targeted Threat Protection Data 
Connector:"", ""description"": ""\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastTTP-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the following fields:\n - appName: Unique string that will be used as id for the app in Azure platform\n - objectId: Azure portal ---> Azure Active Directory ---> more info ---> Profile -----> Object ID\n - appInsightsLocation(default): westeurope\n - mimecastEmail: Email address of dedicated user for this integraion\n - mimecastPassword: Password for dedicated user\n - mimecastAppId: Application Id from the Microsoft Sentinel app registered with Mimecast\n - mimecastAppKey: Application Key from the Microsoft Sentinel app registered with Mimecast\n - mimecastAccessKey: Access Key for the dedicated Mimecast user\n - mimecastSecretKey: Secret Key for dedicated Mimecast user\n - mimecastBaseURL: Regional Mimecast API Base URL\n - activeDirectoryAppId: Azure portal ---> App registrations ---> [your_app] ---> Application ID\n - activeDirectoryAppSecret: Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> [your_app_secret]\n - workspaceId: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Workspace ID (or you can copy workspaceId from above) \n - workspaceKey: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Primary Key (or you can copy workspaceKey from above) \n - AppInsightsWorkspaceResourceID : Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Properties ---> Resource ID \n\n >Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n\n6. Go to ***Azure portal ---> Resource groups ---> [your_resource_group] ---> [appName](type: Storage account) ---> Storage Explorer ---> BLOB CONTAINERS ---> TTP checkpoints ---> Upload*** and create empty files on your machine named attachment-checkpoint.txt, impersonation-checkpoint.txt, url-checkpoint.txt and select them for upload (this is done so that date_range for TTP logs are stored in consistent state)\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""You need to have the following pieces of information to configure the integration:\n- mimecastEmail: Email address of a dedicated Mimecast admin user\n- mimecastPassword: Password for the dedicated Mimecast admin user\n- mimecastAppId: API Application Id of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAppKey: API Application Key of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAccessKey: Access Key for the dedicated Mimecast admin user\n- mimecastSecretKey: Secret Key for the dedicated Mimecast admin user\n- mimecastBaseURL: Mimecast Regional API Base URL\n\n> The Mimecast Application Id, Application Key, along with the Access Key and Secret keys for the dedicated Mimecast admin user are obtainable via the Mimecast Administration Console: Administration | Services | API and Platform Integrations.\n\n> The Mimecast API Base URL for each region is documented here: https://integrations.mimecast.com/documentation/api-overview/global-base-urls/""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTTP/Data%20Connectors/MimecastTTP_API_FunctionApp.json","true" -"MimecastTTPImpersonation_CL","MimecastTTP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTTP","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecastttp","2022-02-24","2022-02-24","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastTTPAPI","Mimecast","Mimecast Targeted Threat Protection","The data connector for [Mimecast Targeted Threat Protection](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. 
The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- URL Protect
- Impersonation Protect
- Attachment Protect
","[{""title"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""title"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. Client Secret""}, {""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Configuration:"", ""description"": ""**STEP 1 - Configuration steps for the Mimecast API**\n\nGo to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later)""}, {""title"": """", ""description"": ""**STEP 2 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Mimecast API authorization key(s) or Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Deploy the Mimecast Targeted Threat Protection Data 
Connector:"", ""description"": ""\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastTTP-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the following fields:\n - appName: Unique string that will be used as id for the app in Azure platform\n - objectId: Azure portal ---> Azure Active Directory ---> more info ---> Profile -----> Object ID\n - appInsightsLocation(default): westeurope\n - mimecastEmail: Email address of dedicated user for this integraion\n - mimecastPassword: Password for dedicated user\n - mimecastAppId: Application Id from the Microsoft Sentinel app registered with Mimecast\n - mimecastAppKey: Application Key from the Microsoft Sentinel app registered with Mimecast\n - mimecastAccessKey: Access Key for the dedicated Mimecast user\n - mimecastSecretKey: Secret Key for dedicated Mimecast user\n - mimecastBaseURL: Regional Mimecast API Base URL\n - activeDirectoryAppId: Azure portal ---> App registrations ---> [your_app] ---> Application ID\n - activeDirectoryAppSecret: Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> [your_app_secret]\n - workspaceId: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Workspace ID (or you can copy workspaceId from above) \n - workspaceKey: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Primary Key (or you can copy workspaceKey from above) \n - AppInsightsWorkspaceResourceID : Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Properties ---> Resource ID \n\n >Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n\n6. Go to ***Azure portal ---> Resource groups ---> [your_resource_group] ---> [appName](type: Storage account) ---> Storage Explorer ---> BLOB CONTAINERS ---> TTP checkpoints ---> Upload*** and create empty files on your machine named attachment-checkpoint.txt, impersonation-checkpoint.txt, url-checkpoint.txt and select them for upload (this is done so that date_range for TTP logs are stored in consistent state)\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""You need to have the following pieces of information to configure the integration:\n- mimecastEmail: Email address of a dedicated Mimecast admin user\n- mimecastPassword: Password for the dedicated Mimecast admin user\n- mimecastAppId: API Application Id of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAppKey: API Application Key of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAccessKey: Access Key for the dedicated Mimecast admin user\n- mimecastSecretKey: Secret Key for the dedicated Mimecast admin user\n- mimecastBaseURL: Mimecast Regional API Base URL\n\n> The Mimecast Application Id, Application Key, along with the Access Key and Secret keys for the dedicated Mimecast admin user are obtainable via the Mimecast Administration Console: Administration | Services | API and Platform Integrations.\n\n> The Mimecast API Base URL for each region is documented here: https://integrations.mimecast.com/documentation/api-overview/global-base-urls/""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTTP/Data%20Connectors/MimecastTTP_API_FunctionApp.json","true" -"MimecastTTPUrl_CL","MimecastTTP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTTP","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecastttp","2022-02-24","2022-02-24","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastTTPAPI","Mimecast","Mimecast Targeted Threat Protection","The data connector for [Mimecast Targeted Threat Protection](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. 
The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- URL Protect
- Impersonation Protect
- Attachment Protect
","[{""title"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""title"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. Client Secret""}, {""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Configuration:"", ""description"": ""**STEP 1 - Configuration steps for the Mimecast API**\n\nGo to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later)""}, {""title"": """", ""description"": ""**STEP 2 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Mimecast API authorization key(s) or Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Deploy the Mimecast Targeted Threat Protection Data 
Connector:"", ""description"": ""\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastTTP-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the following fields:\n - appName: Unique string that will be used as id for the app in Azure platform\n - objectId: Azure portal ---> Azure Active Directory ---> more info ---> Profile -----> Object ID\n - appInsightsLocation(default): westeurope\n - mimecastEmail: Email address of dedicated user for this integraion\n - mimecastPassword: Password for dedicated user\n - mimecastAppId: Application Id from the Microsoft Sentinel app registered with Mimecast\n - mimecastAppKey: Application Key from the Microsoft Sentinel app registered with Mimecast\n - mimecastAccessKey: Access Key for the dedicated Mimecast user\n - mimecastSecretKey: Secret Key for dedicated Mimecast user\n - mimecastBaseURL: Regional Mimecast API Base URL\n - activeDirectoryAppId: Azure portal ---> App registrations ---> [your_app] ---> Application ID\n - activeDirectoryAppSecret: Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> [your_app_secret]\n - workspaceId: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Workspace ID (or you can copy workspaceId from above) \n - workspaceKey: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Primary Key (or you can copy workspaceKey from above) \n - AppInsightsWorkspaceResourceID : Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Properties ---> Resource ID \n\n >Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n\n6. Go to ***Azure portal ---> Resource groups ---> [your_resource_group] ---> [appName](type: Storage account) ---> Storage Explorer ---> BLOB CONTAINERS ---> TTP checkpoints ---> Upload*** and create empty files on your machine named attachment-checkpoint.txt, impersonation-checkpoint.txt, url-checkpoint.txt and select them for upload (this is done so that date_range for TTP logs are stored in consistent state)\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""You need to have the following pieces of information to configure the integration:\n- mimecastEmail: Email address of a dedicated Mimecast admin user\n- mimecastPassword: Password for the dedicated Mimecast admin user\n- mimecastAppId: API Application Id of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAppKey: API Application Key of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAccessKey: Access Key for the dedicated Mimecast admin user\n- mimecastSecretKey: Secret Key for the dedicated Mimecast admin user\n- mimecastBaseURL: Mimecast Regional API Base URL\n\n> The Mimecast Application Id, Application Key, along with the Access Key and Secret keys for the dedicated Mimecast admin user are obtainable via the Mimecast Administration Console: Administration | Services | API and Platform Integrations.\n\n> The Mimecast API Base URL for each region is documented here: https://integrations.mimecast.com/documentation/api-overview/global-base-urls/""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTTP/Data%20Connectors/MimecastTTP_API_FunctionApp.json","true" -"","Minemeld","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Minemeld","azuresentinel","azure-sentinel-solution-minemeld","2022-10-11","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"","Miro","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Miro","realtimeboardincdbamiro1645117589045","azure-sentinel-solution-miro","","","","Miro","Partner","https://help.miro.com","","domains","","","","","","","false","","false" 
-"MiroAuditLogs_CL","Miro","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Miro","realtimeboardincdbamiro1645117589045","azure-sentinel-solution-miro","","","","Miro","Partner","https://help.miro.com","","domains","MiroAuditLogsDataConnector","Miro","Miro Audit Logs (Enterprise Plan)","The [Miro Audit Logs](https://help.miro.com/hc/en-us/articles/360017571434-Audit-logs) data connector enables you to ingest organization-wide audit events from Miro into Microsoft Sentinel. Monitor user activities, security events, content access, team changes, and administrative actions to enhance your security operations and compliance capabilities.

**Key features:**
- Track user authentication and access patterns.
- Monitor content creation, sharing, and deletion.
- Audit team and organization configuration changes.
- Detect suspicious activities and policy violations.
- Meet compliance and regulatory requirements.

**Requirements:**
- **Miro Plan**: [Enterprise Plan](https://miro.com/pricing/).
- **OAuth scope**: `auditlogs:read`.
- **Role**: Company Admin in your Miro organization.

💡 **Not on Enterprise Plan yet?** Upgrade to [Miro Enterprise](https://miro.com/enterprise/) to unlock audit logs and gain comprehensive visibility into your team's activities in Microsoft Sentinel.

For detailed instructions, refer to the [documentation](https://help.miro.com/hc/en-us/articles/31325908249362).","[{""description"": ""**Step 1: Verify your Miro plan**\n\n1. Ensure your organization has an active [Miro Enterprise Plan](https://miro.com/pricing/).\n2. If you need to upgrade, contact [Miro Sales](https://miro.com/contact/sales/) or your account manager.\n3. You must be a **Company Admin** to set up this integration.""}, {""description"": ""**Step 2: Choose your setup option**\n\nThere are two ways to set up the Miro Audit Logs connector.\n\n**Option 1 (recommended):** Use Enterprise integrations\n- Simplest setup with automatic token generation.\n- Recommended for most users.\n- See Option 1 below.\n\n**Option 2 (alternative):** Create custom OAuth application\n- More control over OAuth app configuration.\n- For advanced users or custom integration needs.\n- See Option 2 below.\n\n**Note:** When using Option 1, the integration is automatically tied to the team with the largest number of users in your organization. When using Option 2, you can choose which team to install the app to. However, **the team selection does not affect which logs are collected**\u2014both options provide organization-wide log access. All integration-relevant events from all teams are included in your logs.""}, {""description"": ""**Option 1: Enterprise integrations (recommended)**\n\n1. Open [Miro Company Settings](https://miro.com/app/settings/).\n2. Expand the **Apps and integrations** section.\n3. Click **Enterprise integrations**.\n4. Enable the **SIEM** toggle.\n5. Copy the **Access Token** value that appears.\n6. **Important:** Store the token securely\u2014it provides full access to audit logs.\n7. The token will work until you disable the toggle.\n8. Proceed to Step 3.""}, {""description"": ""**Option 2: Custom OAuth application (alternative)**\n\n1. Go to [Miro App Settings](https://miro.com/app/settings/user-profile/apps).\n2. Click **Create new app**.\n3. 
Select **Non-expiring access token** option during app creation.\n4. Enable the OAuth scope: **`auditlogs:read`**.\n5. Click **Install app and get OAuth token**.\n6. Authorize the app to access your organization.\n7. Copy the **Access Token** that is displayed.\n8. **Important:** Store the token securely\u2014it provides full access to audit logs.\n9. The token will work until you uninstall the app.""}, {""description"": ""**Step 3: Learn more**\n\nFor detailed information about Miro audit logs:\n- [Miro Audit Logs documentation](https://help.miro.com/hc/en-us/articles/360017571434-Audit-logs)\n- [Miro API reference](https://developers.miro.com/reference/enterprise-get-audit-logs)\n- [OAuth non-expiring tokens](https://developers.miro.com/reference/authorization-flow-for-expiring-access-tokens)\n- [Enterprise integrations settings](https://miro.com/app/settings/)""}, {""description"": ""**Step 4: Connect to Miro**\n\nProvide your Miro access token below to complete the connection."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Access token"", ""placeholder"": ""Enter your Miro Access Token"", ""type"": ""password"", ""name"": ""AccessToken""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}}], ""title"": ""Connect to Miro to start collecting audit logs in Microsoft Sentinel.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Miro Enterprise Plan"", ""description"": ""Miro Enterprise Plan subscription is required.""}, {""name"": ""Miro OAuth Application"", ""description"": ""Miro OAuth application with auditlogs:read scope and Company Admin role is 
required.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Miro/Data%20Connectors/MiroAuditLogs_CCF/MiroAuditLogs_DataConnectorDefinition.json","true" -"MiroContentLogs_CL","Miro","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Miro","realtimeboardincdbamiro1645117589045","azure-sentinel-solution-miro","","","","Miro","Partner","https://help.miro.com","","domains","MiroContentLogsDataConnector","Miro","Miro Content Logs (Enterprise Plan + Enterprise Guard)","The [Miro Content Logs](https://help.miro.com/hc/en-us/articles/17774729839378-Content-Logs-overview) data connector enables you to ingest content activity logs from Miro into Microsoft Sentinel. Part of Miro's Enterprise Guard eDiscovery capabilities, this connector provides content-level visibility for compliance, legal hold, and advanced threat detection.

**Key features:**
- Track all content item changes.
- Monitor content modifications by user and timestamp.
- Support compliance and eDiscovery requirements.
- Detect data exfiltration and insider threats.
- Meet regulatory and legal hold obligations.

**Requirements:**
- **Miro Plan**: [Enterprise Plan](https://miro.com/pricing/) + **Enterprise Guard** add-on.
- **OAuth scope**: `contentlogs:export`.
- **Role**: Company Admin in your Miro organization.
- **Organization ID**: Your Miro organization identifier.

💡 **Not on Enterprise Plan yet?** Upgrade to [Miro Enterprise](https://miro.com/enterprise/) to unlock advanced security and compliance features for your team's collaboration activities in Microsoft Sentinel.

💡 **Need Content Logs?** Content activity logging is part of [Miro Enterprise Guard](https://miro.com/enterprise-guard/), which provides advanced security, compliance, and eDiscovery features. Contact your Miro account manager to add Enterprise Guard to your Enterprise Plan and unlock content-level monitoring in Microsoft Sentinel.

**Note:** If you only have the base Enterprise Plan (without Enterprise Guard), please use the **Miro Audit Logs** connector instead for organization-level event monitoring.

For detailed instructions, refer to the [documentation](https://help.miro.com/hc/en-us/articles/31325908249362).","[{""description"": ""**Step 1: Verify your Miro plan and Enterprise Guard**\n\n1. Ensure your organization has [Miro Enterprise Plan](https://miro.com/pricing/) with **Enterprise Guard** add-on.\n2. Content logs are part of Miro's eDiscovery (Enterprise Guard) features.\n3. If you don't have Enterprise Guard yet, contact your [Miro account manager](https://miro.com/contact/sales/) to upgrade.\n4. Without Enterprise Guard, use the **Miro Audit Logs** connector for organization-level monitoring.\n5. You must be a **Company Admin** to set up this integration.""}, {""description"": ""**Step 2: Choose your setup option**\n\nThere are two ways to set up the Miro Content Logs connector.\n\n**Option 1 (recommended):** Use Enterprise integrations\n- Simplest setup with automatic token generation.\n- Recommended for most users.\n- See Option 1 below.\n\n**Option 2 (alternative):** Create custom OAuth application\n- More control over OAuth app configuration.\n- For advanced users or custom integration needs.\n- See Option 2 below.\n\n**Note:** When using Option 1, the integration is automatically tied to the team with the largest number of users in your organization. When using Option 2, you can choose which team to install the app to. However, **the team selection does not affect which logs are collected**\u2014both options provide organization-wide log access. All integration-relevant events from all teams are included in your logs.""}, {""description"": ""**Option 1: Enterprise integrations (recommended)**\n\n1. Open [Miro Company Settings](https://miro.com/app/settings/).\n2. Expand the **Apps and integrations** section.\n3. Click **Enterprise integrations**.\n4. Enable the **eDiscovery** toggle.\n5. Copy the **Access Token** value that appears.\n6. 
Get your **Organization ID** from the browser URL:\n - Look at the browser URL to find your Organization ID.\n - The URL format is: `https://miro.com/app/settings/company/{ORGANIZATION_ID}/`.\n - Copy your Organization ID from the URL (the numeric value).\n7. **Important:** Store both the token and Organization ID securely\u2014they provide full access to content logs.\n8. The token will work until you disable the toggle.\n9. Proceed to Step 3.""}, {""description"": ""**Option 2: Custom OAuth application (alternative)**\n\n1. Go to [Miro App Settings](https://miro.com/app/settings/user-profile/apps).\n2. Click **Create new app**.\n3. Select **Non-expiring access token** option during app creation.\n4. Enable the OAuth scope: **`contentlogs:export`**.\n5. Click **Install app and get OAuth token**.\n6. Authorize the app to access your organization.\n7. Copy the **Access Token** that is displayed.\n8. Get your **Organization ID**:\n - Go to [Miro Company Settings](https://miro.com/app/settings/).\n - Look at the browser URL to find your Organization ID.\n - The URL format is: `https://miro.com/app/settings/company/{ORGANIZATION_ID}/`.\n - Copy your Organization ID from the URL (the numeric value).\n9. **Important:** Store both the token and Organization ID securely\u2014they provide full access to content logs.\n10. 
The token will work until you uninstall the app.""}, {""description"": ""**Step 3: Learn more**\n\nFor detailed information about Miro content logs and eDiscovery:\n- [Miro Content Logs overview](https://help.miro.com/hc/en-us/articles/17774729839378-Content-Logs-overview)\n- [Miro Enterprise Guard](https://miro.com/enterprise-guard/)\n- [Miro API reference](https://developers.miro.com/reference/enterprise-board-content-item-logs-fetch)\n- [OAuth non-expiring tokens](https://developers.miro.com/reference/authorization-flow-for-expiring-access-tokens)\n- [Enterprise integrations settings](https://miro.com/app/settings/)""}, {""description"": ""**Step 4: Connect to Miro**\n\nProvide the required values below to complete the connection."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Organization ID"", ""placeholder"": ""Enter your Miro Organization ID"", ""type"": ""text"", ""name"": ""organizationId""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Access token"", ""placeholder"": ""Enter your Miro Access Token"", ""type"": ""password"", ""name"": ""AccessToken""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}}], ""title"": ""Connect to Miro to start collecting content logs in Microsoft Sentinel.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Miro Enterprise Plan with Enterprise Guard"", ""description"": ""Miro Enterprise Plan with Enterprise Guard add-on is required. 
Content logs are part of Miro's eDiscovery features and are not available on base Enterprise Plan or lower tiers.""}, {""name"": ""Miro OAuth Application"", ""description"": ""Miro OAuth application with contentlogs:export scope and Company Admin role is required.""}, {""name"": ""Miro Organization ID"", ""description"": ""Your Miro organization ID is required to access content logs.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Miro/Data%20Connectors/MiroContentLogs_CCF/MiroContentLogs_DataConnectorDefinition.json","true" -"","MongoDBAtlas","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MongoDBAtlas","mongodb","azure-sentinel-solution-mongodbatlas","2025-08-22","","","MongoDB","Partner","https://www.mongodb.com/company/contact","","domains","","","","","","","false","","false" -"MDBALogTable_CL","MongoDBAtlas","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MongoDBAtlas","mongodb","azure-sentinel-solution-mongodbatlas","2025-08-22","","","MongoDB","Partner","https://www.mongodb.com/company/contact","","domains","MongoDBAtlasLogsAzureFunctions","MongoDB","MongoDB Atlas Logs","The [MongoDBAtlas](https://www.mongodb.com/products/platform/atlas-database) Logs connector gives the capability to upload MongoDB Atlas database logs into Microsoft Sentinel through the MongoDB Atlas Administration API. Refer to the [API documentation](https://www.mongodb.com/docs/api/doc/atlas-admin-api-v2/) for more information. The connector provides the ability to get a range of database log messages for the specified hosts and specified project.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to 'MongoDB Atlas' to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">Ensure the workspace is added to Microsoft Sentinel before deploying the connector.""}, {""title"": ""STEP 1 - Configuration steps for the 'MongoDB Atlas Administration API'"", ""description"": ""1. [Follow these instructions](https://www.mongodb.com/docs/atlas/configure-api-access/#grant-programmatic-access-to-an-organization) to create a MongoDB Atlas service account.\n2. Copy the **Client ID** and **Client Secret** you created, also the **Group ID** (Project) and each **Cluster ID** (Hostname) required for later steps.\n3. Refer [MongoDB Atlas API documentation](https://www.mongodb.com/docs/api/doc/atlas-admin-api-v2/operation/operation-downloadgroupclusterlog) for more details.\n4. The client secret can be passed into the connector via an Azure key vault or directly into the connector.\n5. If you want to use the key vault option create a key vault, using a Vault Access Policy, with a secret named **mongodb-client-secret** and your client secret saved as the secret value.""}, {""title"": ""STEP 2 - Deploy the 'MongoDB Atlas Logs' connector and the associated Azure Function"", ""description"": ""\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#view/Microsoft_Azure_CreateUIDef/CustomDeploymentBlade/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FMongoDBAtlas%2FData%20Connectors%2FMongoDBAtlasLogs%2Fazuredeploy_Connector_MongoDBAtlasLogs_AzureFunction.json/uiFormDefinitionUri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FMongoDBAtlas%2FData%20Connectors%2FMongoDBAtlasLogs%2FcreateUiDef.json)""}, {""title"": ""STEP 3 - Set the connector parameters"", ""description"": ""1. Select the preferred **Subscription** and an existing **Resource Group**.\n2. Enter an existing **Log Analytics Workspace Resource ID** belonging to the resource group.\n3. Click **Next**\n4. Enter the **MongoDB Group ID**, a list of up to 10 **MongoDB Cluster IDs**, each on a separate line, and **MongoDB Client ID**.\n5. Choose for **Authentication Method** either **Client Secret** and copy in your client secret value or **Key Vault** and copy in the name of your key vault. \nClick **Next** \n6. Review the MongoDB filters. Select logs from at least one category. Click **Next** \n7. Review the schedule. Click **Next** \n8. Review the settings then click **Create**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""MongoDB Atlas service account **Client ID** and **Client Secret** are required. [See the documentation to learn more about creating a service account](https://www.mongodb.com/docs/atlas/configure-api-access/#grant-programmatic-access-to-an-organization)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MongoDBAtlas/Data%20Connectors/MongoDBAtlasLogs/MongoDBAtlasLogs_AzureFunction.json","true" -"","MongoDBAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MongoDBAudit","azuresentinel","azure-sentinel-solution-mongodbaudit","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"MongoDBAudit_CL","MongoDBAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MongoDBAudit","azuresentinel","azure-sentinel-solution-mongodbaudit","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MongoDB","MongoDB","[Deprecated] MongoDB Audit","MongoDB data connector provides the capability to ingest [MongoDBAudit](https://www.mongodb.com/) into Microsoft Sentinel. 
Refer to [MongoDB documentation](https://www.mongodb.com/docs/manual/tutorial/getting-started/) for more information.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias MongoDBAudit and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MongoDBAudit/Parsers/MongoDBAudit.txt) on the second line of the query, enter the hostname(s) of your MongoDBAudit device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Tomcat Server where the logs are generated.\n\n> Logs from MongoDB Enterprise Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure 
Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure MongoDBAudit to write logs to files"", ""description"": ""Edit mongod.conf file (for Linux) or mongod.cfg (for Windows) to write logs to files:\n\n>**dbPath**: data/db\n\n>**path**: data/db/auditLog.json\n\nSet the following parameters: **dbPath** and **path**. Refer to the [MongoDB documentation for more details](https://www.mongodb.com/docs/manual/tutorial/configure-auditing/)""}, {""title"": ""3. Configure the logs to be collected"", ""description"": ""Configure the custom log directory to be collected"", ""instructions"": [{""parameters"": {""linkType"": ""OpenCustomLogsSettings""}, ""type"": ""InstallAgent""}]}, {""title"": """", ""description"": ""1. Select the link above to open your workspace advanced settings \n2. From the left pane, select **Settings**, select **Custom Logs** and click **+Add custom log**\n3. Click **Browse** to upload a sample of a MongoDBAudit log file. Then, click **Next >**\n4. Select **Timestamp** as the record delimiter and click **Next >**\n5. Select **Windows** or **Linux** and enter the path to MongoDBAudit logs based on your configuration \n6. After entering the path, click the '+' symbol to apply, then click **Next >** \n7. 
Add **MongoDBAudit** as the custom log Name (the '_CL' suffix will be added automatically) and click **Done**.""}, {""title"": ""Validate connectivity"", ""description"": ""It may take upwards of 20 minutes until your logs start to appear in Microsoft Sentinel.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MongoDBAudit/Data%20Connectors/Connector_MongoDBAudit.json","true" -"","Morphisec","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Morphisec","morphisec","morphisec_utpp_mss","2022-05-05","","","Morphisec","Partner","https://support.morphisec.com/support/home","","domains","","","","","","","false","","false" -"MorphisecAlerts_CL","Morphisec","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Morphisec","morphisec","morphisec_utpp_mss","2022-05-05","","","Morphisec","Partner","https://support.morphisec.com/support/home","","domains","MorphisecCCF","Morphisec","Morphisec API Data Connector (via Codeless Connector Framework)","The [Morphisec](https://www.morphisec.com/) solution for Microsoft Sentinel enables you to seamlessly ingest security alerts directly from the Morphisec API. 
By leveraging Morphisec's proactive breach prevention and moving target defense capabilities, this integration enriches your security operations with high-fidelity, low-noise alerts on evasive threats.
This solution provides more than just data ingestion; it equips your security team with a full suite of ready-to-use content, including: Data Connector, ASIM Parser, Analytic Rule Templates and Workbook.
With this solution, you can empower your SOC to leverage Morphisec's powerful threat prevention within a unified investigation and response workflow in Microsoft Sentinel.","[{""title"": ""Configure Morphisec Connector"", ""description"": ""1. Create an API key client in Morphisec Console with read permissions to fetch alerts. \n2. Provide the Client ID and Client Secret in the connector configuration."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Morphisec Base URL"", ""placeholder"": ""https://.morphisec.cloud"", ""type"": ""text"", ""name"": ""baseUrl""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client ID"", ""placeholder"": ""Enter the Client ID"", ""type"": ""text"", ""name"": ""clientId""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client Secret"", ""placeholder"": ""Enter the Client Secret"", ""type"": ""password"", ""name"": ""secret""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Tenant ID"", ""placeholder"": ""Enter your Morphisec Tenant ID"", ""type"": ""text"", ""name"": ""tenantId""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect to Morphisec"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Morphisec/Data%20Connectors/Morphisec_CCF/Morphisec_ConnectorDefinition.json","true" -"","Mulesoft","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mulesoft","azuresentinel","azure-sentinel-solution-mulesoft","2022-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" 
-"MuleSoft_Cloudhub_CL","Mulesoft","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mulesoft","azuresentinel","azure-sentinel-solution-mulesoft","2022-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MuleSoft","MuleSoft","MuleSoft Cloudhub","The [MuleSoft Cloudhub](https://www.mulesoft.com/platform/saas/cloudhub-ipaas-cloud-based-integration) data connector provides the capability to retrieve logs from Cloudhub applications using the Cloudhub API and more events into Microsoft Sentinel through the REST API. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Azure Blob Storage API to pull logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**MuleSoftCloudhub**](https://aka.ms/sentinel-MuleSoftCloudhub-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**Note: This data connector fetch only the logs of the CloudHub application using Platform API and not of CloudHub 2.0 application**""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the MuleSoft Cloudhub API**\n\n Follow the instructions to obtain the credentials.\n\n1. Obtain the **MuleSoftEnvId**, **MuleSoftAppName**, **MuleSoftUsername** and **MuleSoftPassword** using the [documentation](https://help.mulesoft.com/s/article/How-to-get-Cloudhub-application-information-using-Anypoint-Platform-API).\n2. Save credentials for using in the data connector.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the MuleSoft Cloudhub data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""**Option 1 - Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the MuleSoft Cloudhub data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MuleSoftCloudhubAPI-azuredeploy)\n2. 
Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **MuleSoftEnvId**, **MuleSoftAppName**, **MuleSoftUsername** and **MuleSoftPassword** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": """", ""description"": ""**Option 2 - Manual Deployment of Azure Functions**\n\n Use the following step-by-step instructions to deploy the MuleSoft Cloudhub data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-MuleSoftCloudhubAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. 
**Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. MuleSoftXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tMuleSoftEnvId\n\t\tMuleSoftAppName\n\t\tMuleSoftUsername\n\t\tMuleSoftPassword\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**MuleSoftEnvId**, **MuleSoftAppName**, **MuleSoftUsername** and **MuleSoftPassword** are required for making API calls.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mulesoft/Data%20Connectors/MuleSoft_Cloudhub_API_FunctionApp.json","true" -"","Multi Cloud Attack Coverage Essentials - Resource Abuse","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Multi%20Cloud%20Attack%20Coverage%20Essentials%20-%20Resource%20Abuse","azuresentinel","azure-sentinel-solution-multicloudattackcoverage","2023-11-22","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","NCSC-NL NDN Cyber Threat Intelligence Sharing","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NCSC-NL%20NDN%20Cyber%20Threat%20Intelligence%20Sharing","azuresentinel","azure-sentinel-solution-ncscnlndncti","2025-05-19","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","NGINX HTTP Server","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NGINX%20HTTP%20Server","azuresentinel","azure-sentinel-solution-nginx","2021-12-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"NGINX_CL","NGINX HTTP 
Server","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NGINX%20HTTP%20Server","azuresentinel","azure-sentinel-solution-nginx","2021-12-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","NGINXHTTPServer","Nginx","[Deprecated] NGINX HTTP Server","The NGINX HTTP Server data connector provides the capability to ingest [NGINX](https://nginx.org/en/) HTTP Server events into Microsoft Sentinel. Refer to [NGINX Logs documentation](https://nginx.org/en/docs/http/ngx_http_log_module.html) for more information.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias NGINXHTTPServer and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NGINX%20HTTP%20Server/Parsers/NGINXHTTPServer.txt).The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the NGINX HTTP Server where the logs are generated.\n\n> Logs from NGINX HTTP Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the custom log directory to be collected"", ""instructions"": [{""parameters"": {""linkType"": ""OpenCustomLogsSettings""}, ""type"": ""InstallAgent""}]}, {""title"": """", ""description"": ""1. 
Select the link above to open your workspace advanced settings \n2. From the left pane, select **Data**, select **Custom Logs** and click **Add+**\n3. Click **Browse** to upload a sample of a NGINX HTTP Server log file (e.g. access.log or error.log). Then, click **Next >**\n4. Select **New line** as the record delimiter and click **Next >**\n5. Select **Windows** or **Linux** and enter the path to NGINX HTTP logs based on your configuration. Example: \n - **Linux** Directory: '/var/log/nginx/*.log' \n6. After entering the path, click the '+' symbol to apply, then click **Next >** \n7. Add **NGINX_CL** as the custom log Name and click **Done**""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NGINX%20HTTP%20Server/Data%20Connectors/Connector_NGINX_agent.json","true" -"","NISTSP80053","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NISTSP80053","azuresentinel","azure-sentinel-solution-nistsp80053","2022-02-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","NXLog BSM macOS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLog%20BSM%20macOS","nxlogltd1589381969261","nxlog_bsm_macos_mss","2022-05-02","","","NXLog","Partner","https://nxlog.co/support-tickets/add/support-ticket","","domains","","","","","","","false","","false" -"BSMmacOS_CL","NXLog BSM macOS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLog%20BSM%20macOS","nxlogltd1589381969261","nxlog_bsm_macos_mss","2022-05-02","","","NXLog","Partner","https://nxlog.co/support-tickets/add/support-ticket","","domains","NXLogBSMmacOS","NXLog","NXLog BSM macOS","The [NXLog BSM](https://docs.nxlog.co/refman/current/im/bsm.html) macOS data connector uses Sun's Basic Security Module (BSM) Auditing API to read events directly from the kernel for capturing audit events on the macOS platform. 
This REST API connector can efficiently export macOS audit events to Microsoft Sentinel in real-time.","[{""title"": """", ""description"": ""Follow the step-by-step instructions in the *NXLog User Guide* Integration Topic [Microsoft Sentinel](https://docs.nxlog.co/userguide/integrate/microsoft-azure-sentinel.html) to configure this connector."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLog%20BSM%20macOS/Data%20Connectors/NXLogBSMmacOS.json","true" -"","NXLog FIM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLog%20FIM","nxlogltd1589381969261","nxlog_fim","2022-08-15","","","NXLog","Partner","https://nxlog.co/support-tickets/add/support-ticket","","domains","","","","","","","false","","false" -"NXLogFIM_CL","NXLog FIM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLog%20FIM","nxlogltd1589381969261","nxlog_fim","2022-08-15","","","NXLog","Partner","https://nxlog.co/support-tickets/add/support-ticket","","domains","NXLogFIM","NXLog","NXLog FIM","The [NXLog FIM](https://docs.nxlog.co/refman/current/im/fim.html) module allows for the scanning of files and directories, reporting detected additions, changes, renames and deletions on the designated paths through calculated checksums during successive scans. 
This REST API connector can efficiently export the configured FIM events to Microsoft Sentinel in real time.","[{""title"": """", ""description"": ""Follow the step-by-step instructions in the [Microsoft Sentinel](https://docs.nxlog.co/userguide/integrate/microsoft-azure-sentinel.html) integration chapter of the *NXLog User Guide* to configure this connector."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLog%20FIM/Data%20Connectors/NXLogFIM.json","true" -"","NXLog LinuxAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLog%20LinuxAudit","nxlogltd1589381969261","nxlog_linuxaudit_mss","2022-05-05","","","NXLog","Partner","https://nxlog.co/support-tickets/add/support-ticket","","domains","","","","","","","false","","false" -"LinuxAudit_CL","NXLog LinuxAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLog%20LinuxAudit","nxlogltd1589381969261","nxlog_linuxaudit_mss","2022-05-05","","","NXLog","Partner","https://nxlog.co/support-tickets/add/support-ticket","","domains","NXLogLinuxAudit","NXLog","NXLog LinuxAudit","The [NXLog LinuxAudit](https://docs.nxlog.co/refman/current/im/linuxaudit.html) data connector supports custom audit rules and collects logs without auditd or any other user-space software. IP addresses and group/user IDs are resolved to their respective names making [Linux audit](https://docs.nxlog.co/userguide/integrate/linux-audit.html) logs more intelligible to security analysts. 
This REST API connector can efficiently export Linux security events to Microsoft Sentinel in real-time.","[{""title"": """", ""description"": ""Follow the step-by-step instructions in the *NXLog User Guide* Integration Topic [Microsoft Sentinel](https://docs.nxlog.co/userguide/integrate/microsoft-azure-sentinel.html) to configure this connector."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLog%20LinuxAudit/Data%20Connectors/NXLogLinuxAudit.json","true" -"","NXLogAixAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLogAixAudit","nxlogltd1589381969261","nxlog_aix_audit","2022-05-05","","","NXLog","Partner","https://nxlog.co/support-tickets/add/support-ticket","","domains","","","","","","","false","","false" -"AIX_Audit_CL","NXLogAixAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLogAixAudit","nxlogltd1589381969261","nxlog_aix_audit","2022-05-05","","","NXLog","Partner","https://nxlog.co/support-tickets/add/support-ticket","","domains","NXLogAixAudit","NXLog","NXLog AIX Audit","The [NXLog AIX Audit](https://docs.nxlog.co/refman/current/im/aixaudit.html) data connector uses the AIX Audit subsystem to read events directly from the kernel for capturing audit events on the AIX platform. 
This REST API connector can efficiently export AIX Audit events to Microsoft Sentinel in real time.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**NXLog_parsed_AIX_Audit_view**](https://aka.ms/sentinel-nxlogaixaudit-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": ""Follow the step-by-step instructions in the *NXLog User Guide* Integration Guide [Microsoft Sentinel](https://docs.nxlog.co/userguide/integrate/microsoft-azure-sentinel.html) to configure this connector."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLogAixAudit/Data%20Connectors/NXLogAixAudit.json","true" -"","NXLogDnsLogs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLogDnsLogs","nxlogltd1589381969261","nxlog_dns_logs","2022-05-24","","","NXLog","Partner","https://nxlog.co/support-tickets/add/support-ticket","","domains","","","","","","","false","","false" -"NXLog_DNS_Server_CL","NXLogDnsLogs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLogDnsLogs","nxlogltd1589381969261","nxlog_dns_logs","2022-05-24","","","NXLog","Partner","https://nxlog.co/support-tickets/add/support-ticket","","domains","NXLogDNSLogs","NXLog","NXLog DNS Logs","The NXLog DNS Logs data connector uses Event Tracing for Windows ([ETW](https://docs.microsoft.com/windows/apps/trace-processing/overview)) for collecting both Audit and Analytical DNS Server events. The [NXLog *im_etw* module](https://docs.nxlog.co/refman/current/im/etw.html) reads event tracing data directly for maximum efficiency, without the need to capture the event trace into an .etl file. This REST API connector can forward DNS Server events to Microsoft Sentinel in real time.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on Kusto functions deployed with the Microsoft Sentinel Solution to work as expected. 
The [**ASimDnsMicrosoftNXLog **](https://aka.ms/sentinel-nxlogdnslogs-parser) is designed to leverage Microsoft Sentinel's built-in DNS-related analytics capabilities."", ""instructions"": []}, {""title"": """", ""description"": ""Follow the step-by-step instructions in the *NXLog User Guide* Integration Topic [Microsoft Sentinel](https://docs.nxlog.co/userguide/integrate/microsoft-azure-sentinel.html) to configure this connector."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLogDnsLogs/Data%20Connectors/NXLogDnsLogs.json","true" -"","Nasuni","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Nasuni","nasunicorporation","nasuni-sentinel","2023-07-07","2023-07-07","","Nasuni","Partner","https://github.com/nasuni-labs/Azure-Sentinel","","domains","","","","","","","false","","false" -"Syslog","Nasuni","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Nasuni","nasunicorporation","nasuni-sentinel","2023-07-07","2023-07-07","","Nasuni","Partner","https://github.com/nasuni-labs/Azure-Sentinel","","domains","NasuniEdgeAppliance","Nasuni","[Deprecated] Nasuni Edge Appliance","The [Nasuni](https://www.nasuni.com/) connector allows you to easily connect your Nasuni Edge Appliance Notifications and file system audit logs with Microsoft Sentinel. This gives you more insight into activity within your Nasuni infrastructure and improves your security operation capabilities.","[{""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Follow the configuration steps below to configure your Linux machine to send Nasuni event information to Microsoft Sentinel. Refer to the [Azure Monitor Agent documenation](https://learn.microsoft.com/en-us/azure/azure-monitor/agents/agents-overview) for additional details on these steps.\nConfigure the facilities you want to collect and their severities.\n1. Select the link below to open your workspace agents configuration, and select the Syslog tab.\n2. Select Add facility and choose from the drop-down list of facilities. Repeat for all the facilities you want to add.\n3. Mark the check boxes for the desired severities for each facility.\n4. Click Apply.\n"", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. 
Configure Nasuni Edge Appliance settings"", ""description"": ""Follow the instructions in the [Nasuni Management Console Guide](https://view.highspot.com/viewer/629a633ae5b4caaf17018daa?iid=5e6fbfcbc7143309f69fcfcf) to configure Nasuni Edge Appliances to forward syslog events. Use the IP address or hostname of the Linux device running the Azure Monitor Agent in the Servers configuration field for the syslog settings.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Nasuni/Data%20Connectors/Nasuni%20Data%20Connector.json","true" -"","NetClean ProActive","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NetClean%20ProActive","netcleantechnologiesab1651557549734","azure-sentinel-solution-netclean-proactive","2022-06-30","","","NetClean","Partner","https://www.netclean.com/contact","","domains","","","","","","","false","","false" -"Netclean_Incidents_CL","NetClean ProActive","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NetClean%20ProActive","netcleantechnologiesab1651557549734","azure-sentinel-solution-netclean-proactive","2022-06-30","","","NetClean","Partner","https://www.netclean.com/contact","","domains","Netclean_ProActive_Incidents","NetClean Technologies","Netclean ProActive Incidents","This connector uses the Netclean Webhook (required) and Logic Apps to push data into Microsoft Sentinel Log Analytics","[{""title"": """", ""description"": "">**NOTE:** NetClean ProActive uses a Webhook to expose incident data, Azure Logic Apps is used to receive and push data to Log Analytics This might result in additional data ingestion costs.\n It's possible to test this without Logic Apps or NetClean Proactive see option 2"", 
""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""1. Create a new logic app\n Use When a HTTP request is recived as the Trigger and save it. It will now have generated a URL that can be used in the ProActive webconsole configuration.\n Add an action:\n Select the Azure Log Analytics Data Collector and choose Send Data\n Enter Connection Name, Workspace ID and Workspace Key, you will find the information needed in your Log Analytics workspace under Settings-->Agents-->Log Analytics agent instructions.\n In JSON Request body add @triggerBody(). in Custom Log Name add Netclean_Incidents."", ""title"": "" Option 1: Logic app""}, {""description"": ""Ingest data using a api function. please use the script found on\n https://learn.microsoft.com/en-us/azure/azure-monitor/logs/data-collector-api?tabs=powershell \nReplace the CustomerId and SharedKey values with your values\nReplace the content in $json variable to the sample data found here: https://github.com/Azure/Azure-Sentinel/blob/master/Sample%20Data/Custom/Netclean_Incidents_CL.json .\nSet the LogType varible to **Netclean_Incidents_CL**\nRun the script"", ""title"": "" Option 2 (Testing only)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NetClean%20ProActive/Data%20Connectors/Connector_NetClean.json","true" -"","Netskope","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskope","netskope","netskope_mss","2022-05-05","","","Netskope","Partner","https://www.netskope.com/services#support","","domains","","","","","","","false","","false" -"Netskope_CL","Netskope","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskope","netskope","netskope_mss","2022-05-05","","","Netskope","Partner","https://www.netskope.com/services#support","","domains","Netskope","Netskope","Netskope","The [Netskope Cloud Security Platform](https://www.netskope.com/platform) connector provides the capability to ingest Netskope logs and events into Microsoft Sentinel. The connector provides visibility into Netskope Platform Events and Alerts in Microsoft Sentinel to improve monitoring and investigation capabilities.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Netskope to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Netskope and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskope/Parsers/Netskope.txt), on the second line of the query, enter the hostname(s) of your Netskope device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Netskope API**\n\n [Follow these instructions](https://docs.netskope.com/en/rest-api-v1-overview.html) provided by Netskope to obtain an API Token. **Note:** A Netskope account is required""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Netskope connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Netskope API Authorization Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""This method provides an automated deployment of the Netskope connector using an ARM Template.\n\n1. 
Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-netskope-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Key**, and **URI**.\n - Use the following schema for the `uri` value: `https://.goskope.com` Replace `` with your domain.\n - The default **Time Interval** is set to pull the last five (5) minutes of data. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion.\n - The default **Log Types** is set to pull all 6 available log types (`alert, page, application, audit, infrastructure, network`), remove any that are not required. \n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n6. After successfully deploying the connector, download the Kusto Function to normalize the data fields. [Follow the steps](https://aka.ms/sentinelgithubparsersnetskope) to use the Kusto function alias, **Netskope**.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""This method provides the step-by-step instructions to deploy the Netskope connector manually with Azure Function.""}, {""title"": """", ""description"": ""**1. Create a Function App**\n\n1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp), and select **+ Add**.\n2. 
In the **Basics** tab, ensure Runtime stack is set to **Powershell Core**. \n3. In the **Hosting** tab, ensure the **Consumption (Serverless)** plan type is selected.\n4. Make other preferable configuration changes, if needed, then click **Create**.""}, {""title"": """", ""description"": ""**2. Import Function App Code**\n\n1. In the newly created Function App, select **Functions** on the left pane and click **+ Add**.\n2. Select **Timer Trigger**.\n3. Enter a unique Function **Name** and modify the cron schedule, if needed. The default value is set to run the Function App every 5 minutes. (Note: the Timer trigger should match the `timeInterval` value below to prevent overlapping data), click **Create**.\n4. Click on **Code + Test** on the left pane. \n5. Copy the [Function App Code](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/Netskope/Data%20Connectors/Netskope/AzureFunctionNetskope/run.ps1) and paste into the Function App `run.ps1` editor.\n6. Click **Save**.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following seven (7) application settings individually, with their respective string values (case-sensitive): \n\t\tapikey\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\ttimeInterval\n\t\tlogTypes\n\t\tlogAnalyticsUri (optional)\n> - Enter the URI that corresponds to your region. The `uri` value must follow the following schema: `https://.goskope.com` - There is no need to add subsequent parameters to the Uri, the Function App will dynamically append the parameters in the proper format.\n> - Set the `timeInterval` (in minutes) to the default value of `5` to correspond to the default Timer Trigger of every `5` minutes. 
If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly to prevent overlapping data ingestion. \n> - Set the `logTypes` to `alert, page, application, audit, infrastructure, network` - This list represents all the available log types. Select the log types based on logging requirements, separating each by a single comma.\n> - Note: If using Azure Key Vault, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.\n5. After successfully deploying the connector, download the Kusto Function to normalize the data fields. [Follow the steps](https://aka.ms/sentinelgithubparsersnetskope) to use the Kusto function alias, **Netskope**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Netskope API Token"", ""description"": ""A Netskope API Token is required. [See the documentation to learn more about Netskope API](https://innovatechcloud.goskope.com/docs/Netskope_Help/en/rest-api-v1-overview.html). **Note:** A Netskope account is required""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskope/Data%20Connectors/Netskope/Netskope_API_FunctionApp.json","true" -"","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","","","","","","","false","","false" -"NetskopeAlerts_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeAlertsEvents","Netskope","Netskope Alerts and Events","Netskope Security Alerts and Events","[{""title"": ""STEP 1 - Create a Netskope API key."", ""description"": ""Follow the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/) for guidance on this step.""}, {""title"": ""STEP 2 - Enter your Netskope product Details"", ""description"": ""Enter your Netskope organisation url & API Token below:"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": 
""Organisation Url"", ""placeholder"": ""Enter your organisation url"", ""type"": ""text"", ""name"": ""OrganisationURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""Enter your API Key"", ""type"": ""password"", ""name"": ""apikey""}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Remediation"", ""type"": ""text"", ""name"": ""NetskopeAlertsRemediationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Remediation data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Uba"", ""type"": ""text"", ""name"": ""NetskopeAlertsUbaingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Uba data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Security Assessment"", ""type"": ""text"", ""name"": ""NetskopeAlertsSecurityAssessmentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Security Assessment data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Quarantine"", ""type"": ""text"", ""name"": ""NetskopeAlertsQuarantineingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Quarantine data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Policy"", ""type"": ""text"", ""name"": ""NetskopeAlertsPolicyingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Policy data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, 
{""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malware"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalwareingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malware data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malsite"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalsiteingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malsite data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts DLP"", ""type"": ""text"", ""name"": ""NetskopeAlertsDlpingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts CTEP"", ""type"": ""text"", ""name"": ""NetskopeAlertsCtepingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts CTEP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Watchlist"", ""type"": ""text"", ""name"": ""NetskopeAlertsWatchlistingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Watchlist data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Compromised Credentials"", ""type"": ""text"", ""name"": ""NetskopeAlertsCompromisedCredentialsingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Compromised Credentials data?"", ""options"": [{""key"": ""Yes"", ""text"": 
""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Content"", ""type"": ""text"", ""name"": ""NetskopeAlertsContentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Content data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Device"", ""type"": ""text"", ""name"": ""NetskopeAlertsDeviceingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Device data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Application"", ""type"": ""text"", ""name"": ""NetskopeEventsApplicationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Application data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Audit"", ""type"": ""text"", ""name"": ""NetskopeEventsAuditioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Audit data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Connection"", ""type"": ""text"", ""name"": ""NetskopeEventsConnectioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Connection data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events DLP"", ""type"": ""text"", ""name"": ""NetskopeEventsDLPingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events DLP data?"", ""options"": [{""key"": 
""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Endpoint"", ""type"": ""text"", ""name"": ""NetskopeEventsEndpointingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Endpoint data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Infrastructure"", ""type"": ""text"", ""name"": ""NetskopeEventsInfrastructureingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Infrastructure data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Network"", ""type"": ""text"", ""name"": ""NetskopeEventsNetworkingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Network data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Page"", ""type"": ""text"", ""name"": ""NetskopeEventsPageingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Page data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""InstructionStepsGroup"", ""parameters"": {""instructionSteps"": [{""title"": ""OPTIONAL: Specify the Index the API uses."", ""description"": ""**Configuring the index is optional and only required in advanced scenario's.** \n Netskope uses an [index](https://docs.netskope.com/en/using-the-rest-api-v2-dataexport-iterator-endpoints/#how-do-iterator-endpoints-function) to retrieve events. 
In some advanced cases (consuming the event in multiple Microsoft Sentinel workspaces, or pre-fatiguing the index to only retrieve recent data), a customer might want to have direct control over the index."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Index"", ""placeholder"": ""NetskopeCCP"", ""type"": ""text"", ""name"": ""Index""}}]}]}}]}, {""title"": ""STEP 3 - Click Connect"", ""description"": ""Verify all fields above were filled in correctly. Press the Connect to connect Netskope to Microsoft Sentinel."", ""instructions"": [{""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Netskope organisation url"", ""description"": ""The Netskope data connector requires you to provide your organisation url. You can find your organisation url by signing into the Netskope portal.""}, {""name"": ""Netskope API key"", ""description"": ""The Netskope data connector requires you to provide a valid API key. 
You can create one by following the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeAlertsEvents_RestAPI_CCP/NetskopeAlertsEvents_ConnectorDefination.json","true" -"NetskopeEventsApplication_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeAlertsEvents","Netskope","Netskope Alerts and Events","Netskope Security Alerts and Events","[{""title"": ""STEP 1 - Create a Netskope API key."", ""description"": ""Follow the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/) for guidance on this step.""}, {""title"": ""STEP 2 - Enter your Netskope product Details"", ""description"": ""Enter your Netskope organisation url & API Token below:"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Organisation Url"", ""placeholder"": ""Enter your organisation url"", ""type"": ""text"", ""name"": ""OrganisationURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""Enter your API Key"", ""type"": ""password"", ""name"": ""apikey""}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Remediation"", ""type"": ""text"", ""name"": ""NetskopeAlertsRemediationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Remediation data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Uba"", ""type"": ""text"", ""name"": ""NetskopeAlertsUbaingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Uba data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": 
""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Security Assessment"", ""type"": ""text"", ""name"": ""NetskopeAlertsSecurityAssessmentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Security Assessment data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Quarantine"", ""type"": ""text"", ""name"": ""NetskopeAlertsQuarantineingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Quarantine data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Policy"", ""type"": ""text"", ""name"": ""NetskopeAlertsPolicyingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Policy data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malware"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalwareingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malware data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malsite"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalsiteingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malsite data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts DLP"", ""type"": ""text"", ""name"": ""NetskopeAlertsDlpingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts DLP data?"", ""options"": [{""key"": 
""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts CTEP"", ""type"": ""text"", ""name"": ""NetskopeAlertsCtepingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts CTEP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Watchlist"", ""type"": ""text"", ""name"": ""NetskopeAlertsWatchlistingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Watchlist data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Compromised Credentials"", ""type"": ""text"", ""name"": ""NetskopeAlertsCompromisedCredentialsingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Compromised Credentials data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Content"", ""type"": ""text"", ""name"": ""NetskopeAlertsContentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Content data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Device"", ""type"": ""text"", ""name"": ""NetskopeAlertsDeviceingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Device data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Application"", ""type"": ""text"", ""name"": ""NetskopeEventsApplicationingestion"", ""required"": true, ""placeholder"": ""Do you want to 
ingest Netskope Events Application data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Audit"", ""type"": ""text"", ""name"": ""NetskopeEventsAuditioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Audit data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Connection"", ""type"": ""text"", ""name"": ""NetskopeEventsConnectioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Connection data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events DLP"", ""type"": ""text"", ""name"": ""NetskopeEventsDLPingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Endpoint"", ""type"": ""text"", ""name"": ""NetskopeEventsEndpointingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Endpoint data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Infrastructure"", ""type"": ""text"", ""name"": ""NetskopeEventsInfrastructureingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Infrastructure data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Network"", ""type"": ""text"", ""name"": ""NetskopeEventsNetworkingestion"", ""required"": true, 
""placeholder"": ""Do you want to ingest Netskope Events Network data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Page"", ""type"": ""text"", ""name"": ""NetskopeEventsPageingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Page data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""InstructionStepsGroup"", ""parameters"": {""instructionSteps"": [{""title"": ""OPTIONAL: Specify the Index the API uses."", ""description"": ""**Configuring the index is optional and only required in advanced scenario's.** \n Netskope uses an [index](https://docs.netskope.com/en/using-the-rest-api-v2-dataexport-iterator-endpoints/#how-do-iterator-endpoints-function) to retrieve events. In some advanced cases (consuming the event in multiple Microsoft Sentinel workspaces, or pre-fatiguing the index to only retrieve recent data), a customer might want to have direct control over the index."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Index"", ""placeholder"": ""NetskopeCCP"", ""type"": ""text"", ""name"": ""Index""}}]}]}}]}, {""title"": ""STEP 3 - Click Connect"", ""description"": ""Verify all fields above were filled in correctly. 
Press the Connect to connect Netskope to Microsoft Sentinel."", ""instructions"": [{""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Netskope organisation url"", ""description"": ""The Netskope data connector requires you to provide your organisation url. You can find your organisation url by signing into the Netskope portal.""}, {""name"": ""Netskope API key"", ""description"": ""The Netskope data connector requires you to provide a valid API key. You can create one by following the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeAlertsEvents_RestAPI_CCP/NetskopeAlertsEvents_ConnectorDefination.json","true" -"NetskopeEventsAudit_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeAlertsEvents","Netskope","Netskope Alerts and Events","Netskope Security Alerts and Events","[{""title"": ""STEP 1 - Create a Netskope API key."", ""description"": ""Follow the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/) for guidance on this step.""}, {""title"": ""STEP 2 - Enter your Netskope product Details"", ""description"": ""Enter your Netskope organisation url & API Token below:"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Organisation Url"", ""placeholder"": ""Enter your organisation url"", 
""type"": ""text"", ""name"": ""OrganisationURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""Enter your API Key"", ""type"": ""password"", ""name"": ""apikey""}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Remediation"", ""type"": ""text"", ""name"": ""NetskopeAlertsRemediationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Remediation data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Uba"", ""type"": ""text"", ""name"": ""NetskopeAlertsUbaingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Uba data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Security Assessment"", ""type"": ""text"", ""name"": ""NetskopeAlertsSecurityAssessmentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Security Assessment data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Quarantine"", ""type"": ""text"", ""name"": ""NetskopeAlertsQuarantineingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Quarantine data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Policy"", ""type"": ""text"", ""name"": ""NetskopeAlertsPolicyingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Policy data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts 
Malware"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalwareingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malware data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malsite"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalsiteingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malsite data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts DLP"", ""type"": ""text"", ""name"": ""NetskopeAlertsDlpingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts CTEP"", ""type"": ""text"", ""name"": ""NetskopeAlertsCtepingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts CTEP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Watchlist"", ""type"": ""text"", ""name"": ""NetskopeAlertsWatchlistingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Watchlist data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Compromised Credentials"", ""type"": ""text"", ""name"": ""NetskopeAlertsCompromisedCredentialsingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Compromised Credentials data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", 
""parameters"": {""label"": ""Netskope Alerts Content"", ""type"": ""text"", ""name"": ""NetskopeAlertsContentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Content data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Device"", ""type"": ""text"", ""name"": ""NetskopeAlertsDeviceingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Device data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Application"", ""type"": ""text"", ""name"": ""NetskopeEventsApplicationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Application data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Audit"", ""type"": ""text"", ""name"": ""NetskopeEventsAuditioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Audit data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Connection"", ""type"": ""text"", ""name"": ""NetskopeEventsConnectioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Connection data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events DLP"", ""type"": ""text"", ""name"": ""NetskopeEventsDLPingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": 
""Dropdown"", ""parameters"": {""label"": ""Netskope Events Endpoint"", ""type"": ""text"", ""name"": ""NetskopeEventsEndpointingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Endpoint data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Infrastructure"", ""type"": ""text"", ""name"": ""NetskopeEventsInfrastructureingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Infrastructure data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Network"", ""type"": ""text"", ""name"": ""NetskopeEventsNetworkingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Network data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Page"", ""type"": ""text"", ""name"": ""NetskopeEventsPageingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Page data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""InstructionStepsGroup"", ""parameters"": {""instructionSteps"": [{""title"": ""OPTIONAL: Specify the Index the API uses."", ""description"": ""**Configuring the index is optional and only required in advanced scenario's.** \n Netskope uses an [index](https://docs.netskope.com/en/using-the-rest-api-v2-dataexport-iterator-endpoints/#how-do-iterator-endpoints-function) to retrieve events. 
In some advanced cases (consuming the event in multiple Microsoft Sentinel workspaces, or pre-fatiguing the index to only retrieve recent data), a customer might want to have direct control over the index."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Index"", ""placeholder"": ""NetskopeCCP"", ""type"": ""text"", ""name"": ""Index""}}]}]}}]}, {""title"": ""STEP 3 - Click Connect"", ""description"": ""Verify all fields above were filled in correctly. Press the Connect to connect Netskope to Microsoft Sentinel."", ""instructions"": [{""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Netskope organisation url"", ""description"": ""The Netskope data connector requires you to provide your organisation url. You can find your organisation url by signing into the Netskope portal.""}, {""name"": ""Netskope API key"", ""description"": ""The Netskope data connector requires you to provide a valid API key. 
You can create one by following the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeAlertsEvents_RestAPI_CCP/NetskopeAlertsEvents_ConnectorDefination.json","true" -"NetskopeEventsConnection_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeAlertsEvents","Netskope","Netskope Alerts and Events","Netskope Security Alerts and Events","[{""title"": ""STEP 1 - Create a Netskope API key."", ""description"": ""Follow the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/) for guidance on this step.""}, {""title"": ""STEP 2 - Enter your Netskope product Details"", ""description"": ""Enter your Netskope organisation url & API Token below:"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Organisation Url"", ""placeholder"": ""Enter your organisation url"", ""type"": ""text"", ""name"": ""OrganisationURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""Enter your API Key"", ""type"": ""password"", ""name"": ""apikey""}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Remediation"", ""type"": ""text"", ""name"": ""NetskopeAlertsRemediationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Remediation data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Uba"", ""type"": ""text"", ""name"": ""NetskopeAlertsUbaingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Uba data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": 
""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Security Assessment"", ""type"": ""text"", ""name"": ""NetskopeAlertsSecurityAssessmentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Security Assessment data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Quarantine"", ""type"": ""text"", ""name"": ""NetskopeAlertsQuarantineingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Quarantine data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Policy"", ""type"": ""text"", ""name"": ""NetskopeAlertsPolicyingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Policy data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malware"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalwareingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malware data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malsite"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalsiteingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malsite data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts DLP"", ""type"": ""text"", ""name"": ""NetskopeAlertsDlpingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts DLP data?"", ""options"": [{""key"": 
""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts CTEP"", ""type"": ""text"", ""name"": ""NetskopeAlertsCtepingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts CTEP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Watchlist"", ""type"": ""text"", ""name"": ""NetskopeAlertsWatchlistingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Watchlist data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Compromised Credentials"", ""type"": ""text"", ""name"": ""NetskopeAlertsCompromisedCredentialsingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Compromised Credentials data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Content"", ""type"": ""text"", ""name"": ""NetskopeAlertsContentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Content data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Device"", ""type"": ""text"", ""name"": ""NetskopeAlertsDeviceingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Device data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Application"", ""type"": ""text"", ""name"": ""NetskopeEventsApplicationingestion"", ""required"": true, ""placeholder"": ""Do you want to 
ingest Netskope Events Application data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Audit"", ""type"": ""text"", ""name"": ""NetskopeEventsAuditioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Audit data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Connection"", ""type"": ""text"", ""name"": ""NetskopeEventsConnectioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Connection data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events DLP"", ""type"": ""text"", ""name"": ""NetskopeEventsDLPingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Endpoint"", ""type"": ""text"", ""name"": ""NetskopeEventsEndpointingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Endpoint data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Infrastructure"", ""type"": ""text"", ""name"": ""NetskopeEventsInfrastructureingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Infrastructure data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Network"", ""type"": ""text"", ""name"": ""NetskopeEventsNetworkingestion"", ""required"": true, 
""placeholder"": ""Do you want to ingest Netskope Events Network data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Page"", ""type"": ""text"", ""name"": ""NetskopeEventsPageingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Page data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""InstructionStepsGroup"", ""parameters"": {""instructionSteps"": [{""title"": ""OPTIONAL: Specify the Index the API uses."", ""description"": ""**Configuring the index is optional and only required in advanced scenario's.** \n Netskope uses an [index](https://docs.netskope.com/en/using-the-rest-api-v2-dataexport-iterator-endpoints/#how-do-iterator-endpoints-function) to retrieve events. In some advanced cases (consuming the event in multiple Microsoft Sentinel workspaces, or pre-fatiguing the index to only retrieve recent data), a customer might want to have direct control over the index."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Index"", ""placeholder"": ""NetskopeCCP"", ""type"": ""text"", ""name"": ""Index""}}]}]}}]}, {""title"": ""STEP 3 - Click Connect"", ""description"": ""Verify all fields above were filled in correctly. 
Press the Connect to connect Netskope to Microsoft Sentinel."", ""instructions"": [{""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Netskope organisation url"", ""description"": ""The Netskope data connector requires you to provide your organisation url. You can find your organisation url by signing into the Netskope portal.""}, {""name"": ""Netskope API key"", ""description"": ""The Netskope data connector requires you to provide a valid API key. You can create one by following the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeAlertsEvents_RestAPI_CCP/NetskopeAlertsEvents_ConnectorDefination.json","true" -"NetskopeEventsDLP_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeAlertsEvents","Netskope","Netskope Alerts and Events","Netskope Security Alerts and Events","[{""title"": ""STEP 1 - Create a Netskope API key."", ""description"": ""Follow the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/) for guidance on this step.""}, {""title"": ""STEP 2 - Enter your Netskope product Details"", ""description"": ""Enter your Netskope organisation url & API Token below:"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Organisation Url"", ""placeholder"": ""Enter your organisation url"", 
""type"": ""text"", ""name"": ""OrganisationURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""Enter your API Key"", ""type"": ""password"", ""name"": ""apikey""}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Remediation"", ""type"": ""text"", ""name"": ""NetskopeAlertsRemediationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Remediation data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Uba"", ""type"": ""text"", ""name"": ""NetskopeAlertsUbaingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Uba data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Security Assessment"", ""type"": ""text"", ""name"": ""NetskopeAlertsSecurityAssessmentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Security Assessment data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Quarantine"", ""type"": ""text"", ""name"": ""NetskopeAlertsQuarantineingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Quarantine data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Policy"", ""type"": ""text"", ""name"": ""NetskopeAlertsPolicyingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Policy data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts 
Malware"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalwareingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malware data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malsite"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalsiteingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malsite data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts DLP"", ""type"": ""text"", ""name"": ""NetskopeAlertsDlpingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts CTEP"", ""type"": ""text"", ""name"": ""NetskopeAlertsCtepingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts CTEP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Watchlist"", ""type"": ""text"", ""name"": ""NetskopeAlertsWatchlistingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Watchlist data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Compromised Credentials"", ""type"": ""text"", ""name"": ""NetskopeAlertsCompromisedCredentialsingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Compromised Credentials data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", 
""parameters"": {""label"": ""Netskope Alerts Content"", ""type"": ""text"", ""name"": ""NetskopeAlertsContentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Content data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Device"", ""type"": ""text"", ""name"": ""NetskopeAlertsDeviceingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Device data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Application"", ""type"": ""text"", ""name"": ""NetskopeEventsApplicationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Application data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Audit"", ""type"": ""text"", ""name"": ""NetskopeEventsAuditioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Audit data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Connection"", ""type"": ""text"", ""name"": ""NetskopeEventsConnectioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Connection data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events DLP"", ""type"": ""text"", ""name"": ""NetskopeEventsDLPingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": 
""Dropdown"", ""parameters"": {""label"": ""Netskope Events Endpoint"", ""type"": ""text"", ""name"": ""NetskopeEventsEndpointingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Endpoint data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Infrastructure"", ""type"": ""text"", ""name"": ""NetskopeEventsInfrastructureingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Infrastructure data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Network"", ""type"": ""text"", ""name"": ""NetskopeEventsNetworkingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Network data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Page"", ""type"": ""text"", ""name"": ""NetskopeEventsPageingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Page data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""InstructionStepsGroup"", ""parameters"": {""instructionSteps"": [{""title"": ""OPTIONAL: Specify the Index the API uses."", ""description"": ""**Configuring the index is optional and only required in advanced scenario's.** \n Netskope uses an [index](https://docs.netskope.com/en/using-the-rest-api-v2-dataexport-iterator-endpoints/#how-do-iterator-endpoints-function) to retrieve events. 
In some advanced cases (consuming the event in multiple Microsoft Sentinel workspaces, or pre-fatiguing the index to only retrieve recent data), a customer might want to have direct control over the index."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Index"", ""placeholder"": ""NetskopeCCP"", ""type"": ""text"", ""name"": ""Index""}}]}]}}]}, {""title"": ""STEP 3 - Click Connect"", ""description"": ""Verify all fields above were filled in correctly. Press the Connect to connect Netskope to Microsoft Sentinel."", ""instructions"": [{""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Netskope organisation url"", ""description"": ""The Netskope data connector requires you to provide your organisation url. You can find your organisation url by signing into the Netskope portal.""}, {""name"": ""Netskope API key"", ""description"": ""The Netskope data connector requires you to provide a valid API key. 
You can create one by following the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeAlertsEvents_RestAPI_CCP/NetskopeAlertsEvents_ConnectorDefination.json","true" -"NetskopeEventsEndpoint_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeAlertsEvents","Netskope","Netskope Alerts and Events","Netskope Security Alerts and Events","[{""title"": ""STEP 1 - Create a Netskope API key."", ""description"": ""Follow the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/) for guidance on this step.""}, {""title"": ""STEP 2 - Enter your Netskope product Details"", ""description"": ""Enter your Netskope organisation url & API Token below:"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Organisation Url"", ""placeholder"": ""Enter your organisation url"", ""type"": ""text"", ""name"": ""OrganisationURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""Enter your API Key"", ""type"": ""password"", ""name"": ""apikey""}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Remediation"", ""type"": ""text"", ""name"": ""NetskopeAlertsRemediationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Remediation data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Uba"", ""type"": ""text"", ""name"": ""NetskopeAlertsUbaingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Uba data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": 
""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Security Assessment"", ""type"": ""text"", ""name"": ""NetskopeAlertsSecurityAssessmentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Security Assessment data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Quarantine"", ""type"": ""text"", ""name"": ""NetskopeAlertsQuarantineingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Quarantine data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Policy"", ""type"": ""text"", ""name"": ""NetskopeAlertsPolicyingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Policy data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malware"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalwareingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malware data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malsite"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalsiteingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malsite data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts DLP"", ""type"": ""text"", ""name"": ""NetskopeAlertsDlpingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts DLP data?"", ""options"": [{""key"": 
""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts CTEP"", ""type"": ""text"", ""name"": ""NetskopeAlertsCtepingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts CTEP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Watchlist"", ""type"": ""text"", ""name"": ""NetskopeAlertsWatchlistingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Watchlist data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Compromised Credentials"", ""type"": ""text"", ""name"": ""NetskopeAlertsCompromisedCredentialsingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Compromised Credentials data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Content"", ""type"": ""text"", ""name"": ""NetskopeAlertsContentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Content data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Device"", ""type"": ""text"", ""name"": ""NetskopeAlertsDeviceingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Device data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Application"", ""type"": ""text"", ""name"": ""NetskopeEventsApplicationingestion"", ""required"": true, ""placeholder"": ""Do you want to 
ingest Netskope Events Application data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Audit"", ""type"": ""text"", ""name"": ""NetskopeEventsAuditioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Audit data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Connection"", ""type"": ""text"", ""name"": ""NetskopeEventsConnectioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Connection data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events DLP"", ""type"": ""text"", ""name"": ""NetskopeEventsDLPingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Endpoint"", ""type"": ""text"", ""name"": ""NetskopeEventsEndpointingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Endpoint data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Infrastructure"", ""type"": ""text"", ""name"": ""NetskopeEventsInfrastructureingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Infrastructure data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Network"", ""type"": ""text"", ""name"": ""NetskopeEventsNetworkingestion"", ""required"": true, 
""placeholder"": ""Do you want to ingest Netskope Events Network data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Page"", ""type"": ""text"", ""name"": ""NetskopeEventsPageingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Page data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""InstructionStepsGroup"", ""parameters"": {""instructionSteps"": [{""title"": ""OPTIONAL: Specify the Index the API uses."", ""description"": ""**Configuring the index is optional and only required in advanced scenario's.** \n Netskope uses an [index](https://docs.netskope.com/en/using-the-rest-api-v2-dataexport-iterator-endpoints/#how-do-iterator-endpoints-function) to retrieve events. In some advanced cases (consuming the event in multiple Microsoft Sentinel workspaces, or pre-fatiguing the index to only retrieve recent data), a customer might want to have direct control over the index."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Index"", ""placeholder"": ""NetskopeCCP"", ""type"": ""text"", ""name"": ""Index""}}]}]}}]}, {""title"": ""STEP 3 - Click Connect"", ""description"": ""Verify all fields above were filled in correctly. 
Press the Connect to connect Netskope to Microsoft Sentinel."", ""instructions"": [{""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Netskope organisation url"", ""description"": ""The Netskope data connector requires you to provide your organisation url. You can find your organisation url by signing into the Netskope portal.""}, {""name"": ""Netskope API key"", ""description"": ""The Netskope data connector requires you to provide a valid API key. You can create one by following the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeAlertsEvents_RestAPI_CCP/NetskopeAlertsEvents_ConnectorDefination.json","true" -"NetskopeEventsInfrastructure_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeAlertsEvents","Netskope","Netskope Alerts and Events","Netskope Security Alerts and Events","[{""title"": ""STEP 1 - Create a Netskope API key."", ""description"": ""Follow the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/) for guidance on this step.""}, {""title"": ""STEP 2 - Enter your Netskope product Details"", ""description"": ""Enter your Netskope organisation url & API Token below:"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Organisation Url"", ""placeholder"": ""Enter your organisation 
url"", ""type"": ""text"", ""name"": ""OrganisationURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""Enter your API Key"", ""type"": ""password"", ""name"": ""apikey""}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Remediation"", ""type"": ""text"", ""name"": ""NetskopeAlertsRemediationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Remediation data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Uba"", ""type"": ""text"", ""name"": ""NetskopeAlertsUbaingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Uba data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Security Assessment"", ""type"": ""text"", ""name"": ""NetskopeAlertsSecurityAssessmentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Security Assessment data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Quarantine"", ""type"": ""text"", ""name"": ""NetskopeAlertsQuarantineingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Quarantine data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Policy"", ""type"": ""text"", ""name"": ""NetskopeAlertsPolicyingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Policy data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope 
Alerts Malware"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalwareingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malware data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malsite"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalsiteingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malsite data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts DLP"", ""type"": ""text"", ""name"": ""NetskopeAlertsDlpingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts CTEP"", ""type"": ""text"", ""name"": ""NetskopeAlertsCtepingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts CTEP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Watchlist"", ""type"": ""text"", ""name"": ""NetskopeAlertsWatchlistingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Watchlist data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Compromised Credentials"", ""type"": ""text"", ""name"": ""NetskopeAlertsCompromisedCredentialsingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Compromised Credentials data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": 
""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Content"", ""type"": ""text"", ""name"": ""NetskopeAlertsContentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Content data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Device"", ""type"": ""text"", ""name"": ""NetskopeAlertsDeviceingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Device data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Application"", ""type"": ""text"", ""name"": ""NetskopeEventsApplicationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Application data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Audit"", ""type"": ""text"", ""name"": ""NetskopeEventsAuditioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Audit data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Connection"", ""type"": ""text"", ""name"": ""NetskopeEventsConnectioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Connection data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events DLP"", ""type"": ""text"", ""name"": ""NetskopeEventsDLPingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": 
""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Endpoint"", ""type"": ""text"", ""name"": ""NetskopeEventsEndpointingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Endpoint data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Infrastructure"", ""type"": ""text"", ""name"": ""NetskopeEventsInfrastructureingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Infrastructure data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Network"", ""type"": ""text"", ""name"": ""NetskopeEventsNetworkingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Network data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Page"", ""type"": ""text"", ""name"": ""NetskopeEventsPageingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Page data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""InstructionStepsGroup"", ""parameters"": {""instructionSteps"": [{""title"": ""OPTIONAL: Specify the Index the API uses."", ""description"": ""**Configuring the index is optional and only required in advanced scenario's.** \n Netskope uses an [index](https://docs.netskope.com/en/using-the-rest-api-v2-dataexport-iterator-endpoints/#how-do-iterator-endpoints-function) to retrieve events. 
In some advanced cases (consuming the event in multiple Microsoft Sentinel workspaces, or pre-fatiguing the index to only retrieve recent data), a customer might want to have direct control over the index."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Index"", ""placeholder"": ""NetskopeCCP"", ""type"": ""text"", ""name"": ""Index""}}]}]}}]}, {""title"": ""STEP 3 - Click Connect"", ""description"": ""Verify all fields above were filled in correctly. Press the Connect to connect Netskope to Microsoft Sentinel."", ""instructions"": [{""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Netskope organisation url"", ""description"": ""The Netskope data connector requires you to provide your organisation url. You can find your organisation url by signing into the Netskope portal.""}, {""name"": ""Netskope API key"", ""description"": ""The Netskope data connector requires you to provide a valid API key. 
You can create one by following the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeAlertsEvents_RestAPI_CCP/NetskopeAlertsEvents_ConnectorDefination.json","true" -"NetskopeEventsNetwork_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeAlertsEvents","Netskope","Netskope Alerts and Events","Netskope Security Alerts and Events","[{""title"": ""STEP 1 - Create a Netskope API key."", ""description"": ""Follow the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/) for guidance on this step.""}, {""title"": ""STEP 2 - Enter your Netskope product Details"", ""description"": ""Enter your Netskope organisation url & API Token below:"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Organisation Url"", ""placeholder"": ""Enter your organisation url"", ""type"": ""text"", ""name"": ""OrganisationURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""Enter your API Key"", ""type"": ""password"", ""name"": ""apikey""}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Remediation"", ""type"": ""text"", ""name"": ""NetskopeAlertsRemediationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Remediation data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Uba"", ""type"": ""text"", ""name"": ""NetskopeAlertsUbaingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Uba data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": 
""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Security Assessment"", ""type"": ""text"", ""name"": ""NetskopeAlertsSecurityAssessmentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Security Assessment data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Quarantine"", ""type"": ""text"", ""name"": ""NetskopeAlertsQuarantineingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Quarantine data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Policy"", ""type"": ""text"", ""name"": ""NetskopeAlertsPolicyingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Policy data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malware"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalwareingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malware data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malsite"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalsiteingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malsite data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts DLP"", ""type"": ""text"", ""name"": ""NetskopeAlertsDlpingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts DLP data?"", ""options"": [{""key"": 
""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts CTEP"", ""type"": ""text"", ""name"": ""NetskopeAlertsCtepingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts CTEP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Watchlist"", ""type"": ""text"", ""name"": ""NetskopeAlertsWatchlistingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Watchlist data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Compromised Credentials"", ""type"": ""text"", ""name"": ""NetskopeAlertsCompromisedCredentialsingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Compromised Credentials data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Content"", ""type"": ""text"", ""name"": ""NetskopeAlertsContentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Content data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Device"", ""type"": ""text"", ""name"": ""NetskopeAlertsDeviceingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Device data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Application"", ""type"": ""text"", ""name"": ""NetskopeEventsApplicationingestion"", ""required"": true, ""placeholder"": ""Do you want to 
ingest Netskope Events Application data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Audit"", ""type"": ""text"", ""name"": ""NetskopeEventsAuditioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Audit data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Connection"", ""type"": ""text"", ""name"": ""NetskopeEventsConnectioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Connection data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events DLP"", ""type"": ""text"", ""name"": ""NetskopeEventsDLPingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Endpoint"", ""type"": ""text"", ""name"": ""NetskopeEventsEndpointingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Endpoint data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Infrastructure"", ""type"": ""text"", ""name"": ""NetskopeEventsInfrastructureingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Infrastructure data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Network"", ""type"": ""text"", ""name"": ""NetskopeEventsNetworkingestion"", ""required"": true, 
""placeholder"": ""Do you want to ingest Netskope Events Network data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Page"", ""type"": ""text"", ""name"": ""NetskopeEventsPageingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Page data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""InstructionStepsGroup"", ""parameters"": {""instructionSteps"": [{""title"": ""OPTIONAL: Specify the Index the API uses."", ""description"": ""**Configuring the index is optional and only required in advanced scenario's.** \n Netskope uses an [index](https://docs.netskope.com/en/using-the-rest-api-v2-dataexport-iterator-endpoints/#how-do-iterator-endpoints-function) to retrieve events. In some advanced cases (consuming the event in multiple Microsoft Sentinel workspaces, or pre-fatiguing the index to only retrieve recent data), a customer might want to have direct control over the index."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Index"", ""placeholder"": ""NetskopeCCP"", ""type"": ""text"", ""name"": ""Index""}}]}]}}]}, {""title"": ""STEP 3 - Click Connect"", ""description"": ""Verify all fields above were filled in correctly. 
Press the Connect to connect Netskope to Microsoft Sentinel."", ""instructions"": [{""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Netskope organisation url"", ""description"": ""The Netskope data connector requires you to provide your organisation url. You can find your organisation url by signing into the Netskope portal.""}, {""name"": ""Netskope API key"", ""description"": ""The Netskope data connector requires you to provide a valid API key. You can create one by following the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeAlertsEvents_RestAPI_CCP/NetskopeAlertsEvents_ConnectorDefination.json","true" -"NetskopeEventsPage_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeAlertsEvents","Netskope","Netskope Alerts and Events","Netskope Security Alerts and Events","[{""title"": ""STEP 1 - Create a Netskope API key."", ""description"": ""Follow the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/) for guidance on this step.""}, {""title"": ""STEP 2 - Enter your Netskope product Details"", ""description"": ""Enter your Netskope organisation url & API Token below:"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Organisation Url"", ""placeholder"": ""Enter your organisation url"", 
""type"": ""text"", ""name"": ""OrganisationURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""Enter your API Key"", ""type"": ""password"", ""name"": ""apikey""}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Remediation"", ""type"": ""text"", ""name"": ""NetskopeAlertsRemediationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Remediation data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Uba"", ""type"": ""text"", ""name"": ""NetskopeAlertsUbaingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Uba data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Security Assessment"", ""type"": ""text"", ""name"": ""NetskopeAlertsSecurityAssessmentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Security Assessment data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Quarantine"", ""type"": ""text"", ""name"": ""NetskopeAlertsQuarantineingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Quarantine data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Policy"", ""type"": ""text"", ""name"": ""NetskopeAlertsPolicyingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Policy data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts 
Malware"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalwareingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malware data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malsite"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalsiteingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malsite data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts DLP"", ""type"": ""text"", ""name"": ""NetskopeAlertsDlpingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts CTEP"", ""type"": ""text"", ""name"": ""NetskopeAlertsCtepingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts CTEP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Watchlist"", ""type"": ""text"", ""name"": ""NetskopeAlertsWatchlistingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Watchlist data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Compromised Credentials"", ""type"": ""text"", ""name"": ""NetskopeAlertsCompromisedCredentialsingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Compromised Credentials data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", 
""parameters"": {""label"": ""Netskope Alerts Content"", ""type"": ""text"", ""name"": ""NetskopeAlertsContentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Content data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Device"", ""type"": ""text"", ""name"": ""NetskopeAlertsDeviceingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Device data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Application"", ""type"": ""text"", ""name"": ""NetskopeEventsApplicationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Application data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Audit"", ""type"": ""text"", ""name"": ""NetskopeEventsAuditioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Audit data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Connection"", ""type"": ""text"", ""name"": ""NetskopeEventsConnectioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Connection data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events DLP"", ""type"": ""text"", ""name"": ""NetskopeEventsDLPingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": 
""Dropdown"", ""parameters"": {""label"": ""Netskope Events Endpoint"", ""type"": ""text"", ""name"": ""NetskopeEventsEndpointingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Endpoint data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Infrastructure"", ""type"": ""text"", ""name"": ""NetskopeEventsInfrastructureingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Infrastructure data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Network"", ""type"": ""text"", ""name"": ""NetskopeEventsNetworkingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Network data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Page"", ""type"": ""text"", ""name"": ""NetskopeEventsPageingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Page data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""InstructionStepsGroup"", ""parameters"": {""instructionSteps"": [{""title"": ""OPTIONAL: Specify the Index the API uses."", ""description"": ""**Configuring the index is optional and only required in advanced scenario's.** \n Netskope uses an [index](https://docs.netskope.com/en/using-the-rest-api-v2-dataexport-iterator-endpoints/#how-do-iterator-endpoints-function) to retrieve events. 
In some advanced cases (consuming the event in multiple Microsoft Sentinel workspaces, or pre-fatiguing the index to only retrieve recent data), a customer might want to have direct control over the index."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Index"", ""placeholder"": ""NetskopeCCP"", ""type"": ""text"", ""name"": ""Index""}}]}]}}]}, {""title"": ""STEP 3 - Click Connect"", ""description"": ""Verify all fields above were filled in correctly. Press the Connect to connect Netskope to Microsoft Sentinel."", ""instructions"": [{""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Netskope organisation url"", ""description"": ""The Netskope data connector requires you to provide your organisation url. You can find your organisation url by signing into the Netskope portal.""}, {""name"": ""Netskope API key"", ""description"": ""The Netskope data connector requires you to provide a valid API key. 
You can create one by following the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeAlertsEvents_RestAPI_CCP/NetskopeAlertsEvents_ConnectorDefination.json","true" -"Netskope_WebTx_metrics_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" -"alertscompromisedcredentialdata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" -"alertsctepdata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" -"alertsdlpdata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" -"alertsmalsitedata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" -"alertsmalwaredata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" -"alertspolicydata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" -"alertsquarantinedata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" -"alertsremediationdata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of the REST APIs, refer to the documentation below:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" -"alertssecurityassessmentdata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of the REST APIs, refer to the documentation below:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" -"alertsubadata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of the REST APIs, refer to the documentation below:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" -"eventsapplicationdata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details on the REST APIs, refer to the documentation below:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft Log Analytics documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" -"eventsauditdata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details on the REST APIs, refer to the documentation below:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft Log Analytics documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" -"eventsconnectiondata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details on the REST APIs, refer to the documentation below:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft Log Analytics documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" -"eventsincidentdata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details on the REST APIs, refer to the documentation below:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft Log Analytics documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" -"eventsnetworkdata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft Log Analytics documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" -"eventspagedata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft Log Analytics documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" -"NetskopeWebtxData_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeWebTransactionsDataConnector","Netskope","Netskope Web Transactions Data Connector","The [Netskope Web Transactions](https://docs.netskope.com/en/netskope-help/data-security/transaction-events/netskope-transaction-events/) data connector provides the functionality of a docker image to pull the Netskope Web Transactions data from google pubsublite, process the data and ingest the processed data to Log Analytics. As part of this data connector two tables will be formed in Log Analytics, one for Web Transactions data and other for errors encountered during execution.


For more details related to Web Transactions refer to the below documentation:
1. Netskope Web Transactions documentation:
> https://docs.netskope.com/en/netskope-help/data-security/transaction-events/netskope-transaction-events/
","[{""title"": """", ""description"": "">**NOTE:** This connector provides the functionality of ingesting Netskope Web Transactions data using a docker image to be deployed on a virtual machine (Either Azure VM/On Premise VM). Check the [Azure VM pricing page](https://azure.microsoft.com/pricing/details/virtual-machines/linux) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. 
Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 2 - Choose one from the following two deployment options to deploy the docker based data connector to ingest Netskope Web Transactions data **\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Netskope API Authorization Key(s) [Make sure the token has permissions for transaction events]."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Using Azure Resource Manager (ARM) Template to deploy VM [Recommended]"", ""description"": ""Using the ARM template deploy an Azure VM, install the prerequisites and start execution.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2WebTransactions-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tDocker Image Name (mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions)\n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSeek Timestamp (The epoch timestamp that you want to seek the pubsublite pointer, can be left empty) \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tBackoff Retry Count (The retry count for token related errors before restarting the execution.) 
\n\t\tBackoff Sleep Time (Number of seconds to sleep before retrying) \n\t\tIdle Timeout (Number of seconds to wait for Web Transactions Data before restarting execution) \n\t\tVM Name \n\t\tAuthentication Type \n\t\tAdmin Password or Key \n\t\tDNS Label Prefix \n\t\tUbuntu OS Version \n\t\tLocation \n\t\tVM Size \n\t\tSubnet Name \n\t\tNetwork Security Group Name \n\t\tSecurity Type \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}, {""title"": ""Option 2 - Manual Deployment on previously created virtual machine"", ""description"": ""Use the following step-by-step instructions to deploy the docker based data connector manually on a previously created virtual machine.""}, {""title"": """", ""description"": ""**1. Install docker and pull docker Image**\n\n>**NOTE:** Make sure that the VM is linux based (preferably Ubuntu).\n\n1. Firstly you will need to [SSH into the virtual machine](https://learn.microsoft.com/azure/virtual-machines/linux-vm-connect?tabs=Linux).\n2. Now install [docker engine](https://docs.docker.com/engine/install/).\n3. Now pull the docker image from docker hub using the command: 'sudo docker pull mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions'.\n4. Now to run the docker image use the command: 'sudo docker run -it -v $(pwd)/docker_persistent_volume:/app mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions'. You can replace mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions with the image id. Here docker_persistent_volume is the name of the folder that would be created on the vm in which the files will get stored.""}, {""title"": """", ""description"": ""**2. Configure the Parameters**\n\n1. Once the docker image is running it will ask for the required parameters.\n2. 
Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSeek Timestamp (The epoch timestamp that you want to seek the pubsublite pointer, can be left empty) \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tBackoff Retry Count (The retry count for token related errors before restarting the execution.) \n\t\tBackoff Sleep Time (Number of seconds to sleep before retrying) \n\t\tIdle Timeout (Number of seconds to wait for Web Transactions Data before restarting execution)\n3. Now the execution has started but is in interactive mode, so that shell cannot be stopped. To run it as a background process, stop the current execution by pressing Ctrl+C and then use the command: 'sudo docker run -d -v $(pwd)/docker_persistent_volume:/app mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions'.""}, {""title"": """", ""description"": ""**3. Stop the docker container**\n\n1. Use the command 'sudo docker container ps' to list the running docker containers. Note down your container id.\n2. Now stop the container using the command: 'sudo docker stop *<*container-id*>*'.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Compute permissions"", ""description"": ""Read and write permissions to Azure VMs is required. [See the documentation to learn more about Azure VMs](https://learn.microsoft.com/azure/virtual-machines/overview).""}, {""name"": ""TransactionEvents Credentials and Permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. [See the documentation to learn more about Transaction Events.](https://docs.netskope.com/en/netskope-help/data-security/transaction-events/netskope-transaction-events/)""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeWebTransactionsDataConnector/Netskope_WebTransactions.json","true" -"NetskopeWebtxErrors_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeWebTransactionsDataConnector","Netskope","Netskope Web Transactions Data Connector","The [Netskope Web Transactions](https://docs.netskope.com/en/netskope-help/data-security/transaction-events/netskope-transaction-events/) data connector provides the functionality of a docker image to pull the Netskope Web Transactions data from google pubsublite, process the data and ingest the processed data to Log Analytics. As part of this data connector two tables will be formed in Log Analytics, one for Web Transactions data and other for errors encountered during execution.


For more details related to Web Transactions refer to the below documentation:
1. Netskope Web Transactions documentation:
> https://docs.netskope.com/en/netskope-help/data-security/transaction-events/netskope-transaction-events/
","[{""title"": """", ""description"": "">**NOTE:** This connector provides the functionality of ingesting Netskope Web Transactions data using a docker image to be deployed on a virtual machine (Either Azure VM/On Premise VM). Check the [Azure VM pricing page](https://azure.microsoft.com/pricing/details/virtual-machines/linux) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. 
Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 2 - Choose one from the following two deployment options to deploy the docker based data connector to ingest Netskope Web Transactions data **\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Netskope API Authorization Key(s) [Make sure the token has permissions for transaction events]."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Using Azure Resource Manager (ARM) Template to deploy VM [Recommended]"", ""description"": ""Using the ARM template deploy an Azure VM, install the prerequisites and start execution.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2WebTransactions-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tDocker Image Name (mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions)\n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSeek Timestamp (The epoch timestamp that you want to seek the pubsublite pointer, can be left empty) \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tBackoff Retry Count (The retry count for token related errors before restarting the execution.) 
\n\t\tBackoff Sleep Time (Number of seconds to sleep before retrying) \n\t\tIdle Timeout (Number of seconds to wait for Web Transactions Data before restarting execution) \n\t\tVM Name \n\t\tAuthentication Type \n\t\tAdmin Password or Key \n\t\tDNS Label Prefix \n\t\tUbuntu OS Version \n\t\tLocation \n\t\tVM Size \n\t\tSubnet Name \n\t\tNetwork Security Group Name \n\t\tSecurity Type \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}, {""title"": ""Option 2 - Manual Deployment on previously created virtual machine"", ""description"": ""Use the following step-by-step instructions to deploy the docker based data connector manually on a previously created virtual machine.""}, {""title"": """", ""description"": ""**1. Install docker and pull docker Image**\n\n>**NOTE:** Make sure that the VM is linux based (preferably Ubuntu).\n\n1. Firstly you will need to [SSH into the virtual machine](https://learn.microsoft.com/azure/virtual-machines/linux-vm-connect?tabs=Linux).\n2. Now install [docker engine](https://docs.docker.com/engine/install/).\n3. Now pull the docker image from docker hub using the command: 'sudo docker pull mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions'.\n4. Now to run the docker image use the command: 'sudo docker run -it -v $(pwd)/docker_persistent_volume:/app mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions'. You can replace mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions with the image id. Here docker_persistent_volume is the name of the folder that would be created on the vm in which the files will get stored.""}, {""title"": """", ""description"": ""**2. Configure the Parameters**\n\n1. Once the docker image is running it will ask for the required parameters.\n2. 
Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSeek Timestamp (The epoch timestamp that you want to seek the pubsublite pointer, can be left empty) \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tBackoff Retry Count (The retry count for token related errors before restarting the execution.) \n\t\tBackoff Sleep Time (Number of seconds to sleep before retrying) \n\t\tIdle Timeout (Number of seconds to wait for Web Transactions Data before restarting execution)\n3. Now the execution has started but is in interactive mode, so that shell cannot be stopped. To run it as a background process, stop the current execution by pressing Ctrl+C and then use the command: 'sudo docker run -d -v $(pwd)/docker_persistent_volume:/app mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions'.""}, {""title"": """", ""description"": ""**3. Stop the docker container**\n\n1. Use the command 'sudo docker container ps' to list the running docker containers. Note down your container id.\n2. Now stop the container using the command: 'sudo docker stop *<*container-id*>*'.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Compute permissions"", ""description"": ""Read and write permissions to Azure VMs is required. [See the documentation to learn more about Azure VMs](https://learn.microsoft.com/azure/virtual-machines/overview).""}, {""name"": ""TransactionEvents Credentials and Permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. [See the documentation to learn more about Transaction Events.](https://docs.netskope.com/en/netskope-help/data-security/transaction-events/netskope-transaction-events/)""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeWebTransactionsDataConnector/Netskope_WebTransactions.json","true" -"","Network Session Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Network%20Session%20Essentials","azuresentinel","azure-sentinel-solution-networksession","2022-11-11","2022-11-11","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","Network Threat Protection Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Network%20Threat%20Protection%20Essentials","azuresentinel","azure-sentinel-solution-networkthreatdetection","2022-11-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","Netwrix Auditor","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netwrix%20Auditor","azuresentinel","azure-sentinel-solution-netwrixauditor","2022-06-17","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"CommonSecurityLog","Netwrix Auditor","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netwrix%20Auditor","azuresentinel","azure-sentinel-solution-netwrixauditor","2022-06-17","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Netwrix","Netwrix","[Deprecated] Netwrix Auditor via Legacy Agent","Netwrix Auditor data connector provides the capability to ingest [Netwrix Auditor (formerly Stealthbits Privileged Activity Manager)](https://www.netwrix.com/auditor.html) events into Microsoft Sentinel. 
Refer to [Netwrix documentation](https://helpcenter.netwrix.com/) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on NetwrixAuditor parser based on a Kusto Function to work as expected. This parser is installed along with solution installation."", ""instructions"": []}, {""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. 
Configure Netwrix Auditor to send logs using CEF"", ""description"": ""[Follow the instructions](https://www.netwrix.com/download/QuickStart/Netwrix_Auditor_Add-on_for_HPE_ArcSight_Quick_Start_Guide.pdf) to configure event export from Netwrix Auditor.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netwrix%20Auditor/Data%20Connectors/Connector_NetwrixAuditor.json","true" -"CommonSecurityLog","Netwrix Auditor","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netwrix%20Auditor","azuresentinel","azure-sentinel-solution-netwrixauditor","2022-06-17","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","NetwrixAma","Netwrix","[Deprecated] Netwrix Auditor via AMA","Netwrix Auditor data connector provides the capability to ingest [Netwrix Auditor (formerly Stealthbits Privileged Activity Manager)](https://www.netwrix.com/auditor.html) events into Microsoft Sentinel. Refer to [Netwrix documentation](https://helpcenter.netwrix.com/) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on NetwrixAuditor parser based on a Kusto Function to work as expected. This parser is installed along with solution installation."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Configure Netwrix Auditor to send logs using CEF"", ""description"": ""[Follow the instructions](https://www.netwrix.com/download/QuickStart/Netwrix_Auditor_Add-on_for_HPE_ArcSight_Quick_Start_Guide.pdf) to configure event export from Netwrix Auditor."", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netwrix%20Auditor/Data%20Connectors/template_NetwrixAuditorAMA.json","true" -"","Neustar IP GeoPoint","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Neustar%20IP%20GeoPoint","azuresentinel","azure-sentinel-solution-neustaripgeopoint","2022-09-30","2022-09-30","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","NonameSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NonameSecurity","nonamegate","nonamesecurity_sentinelsolution","2022-12-01","","","Noname Security","Partner","https://nonamesecurity.com/","","domains","","","","","","","false","","false" -"NonameAPISecurityAlert_CL","NonameSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NonameSecurity","nonamegate","nonamesecurity_sentinelsolution","2022-12-01","","","Noname Security","Partner","https://nonamesecurity.com/","","domains","NonameSecurityMicrosoftSentinel","Noname Security","Noname Security for Microsoft Sentinel","Noname Security solution to POST data into a Microsoft Sentinel SIEM workspace via the Azure Monitor REST API","[{""title"": ""Configure the Noname Sentinel integration."", ""description"": ""Configure the Sentinel workflow in the Noname integrations settings. 
Find documentation at https://docs.nonamesecurity.com"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NonameSecurity/Data%20Connectors/Connector_RESTAPI_NonameSecurity.json","true" -"","NordPass","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NordPass","nordsecurityinc","azure-sentinel-solution-nordpass","2025-04-22","","","NordPass","Partner","https://support.nordpass.com/","","domains","","","","","","","false","","false" -"NordPassEventLogs_CL","NordPass","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NordPass","nordsecurityinc","azure-sentinel-solution-nordpass","2025-04-22","","","NordPass","Partner","https://support.nordpass.com/","","domains","NordPass","NordPass","NordPass","Integrating NordPass with Microsoft Sentinel SIEM via the API will allow you to automatically transfer Activity Log data from NordPass to Microsoft Sentinel and get real-time insights, such as item activity, all login attempts, and security 
notifications.","[{""description"": ""To proceed with the Microsoft Sentinel setup\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-Nordpass-azuredeploy)\n2. **Please note that after the successful deployment, the system pulls Activity Log data every 1 minute by default.**""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""description"": ""Ensure that the [resource group](https://learn.microsoft.com/en-us/azure/azure-resource-manager/management/manage-resource-groups-portal#create-resource-groups) and the [Log Analytics workspace](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/quick-create-workspace?tabs=azure-portal) are created and located in the same region so you can deploy the Azure Functions.""}, {""description"": ""[Add Microsoft Sentinel](https://learn.microsoft.com/en-us/azure/sentinel/quickstart-onboard#enable-microsoft-sentinel-) to the created Log Analytics workspace.""}, {""description"": ""Generate a [Microsoft Sentinel API URL and token](https://www.google.com/url?q=https://support.nordpass.com/hc/en-us/articles/31972037289873&sa=D&source=docs&ust=1743770997230005&usg=AOvVaw16p0hstJ6OeBBoFdBKZRfr) in the NordPass Admin Panel to finish the Azure Functions integration. Please note that you\u2019ll need the NordPass Enterprise account for that.""}, {""description"": ""**Important:** This connector uses Azure Functions to retrieve Activity Logs from NordPass into Microsoft Sentinel. This may result in additional data ingestion costs. 
For more information, refer to the Azure Functions pricing page.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NordPass/Data%20Connectors/NordPass_API_FunctionApp.json;https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NordPass/Data%20Connectors/deployment/NordPass_data_connector.json","false" -"","NozomiNetworks","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NozomiNetworks","azuresentinel","azure-sentinel-solution-nozominetworks","2022-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"CommonSecurityLog","NozomiNetworks","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NozomiNetworks","azuresentinel","azure-sentinel-solution-nozominetworks","2022-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","NozomiNetworksN2OS","Nozomi Networks","[Deprecated] Nozomi Networks N2OS via Legacy Agent","The [Nozomi Networks](https://www.nozominetworks.com/) data connector provides the capability to ingest Nozomi Networks Events into Microsoft Sentinel. Refer to the Nozomi Networks [PDF documentation](https://www.nozominetworks.com/resources/data-sheets-brochures-learning-guides/) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**NozomiNetworksEvents**](https://aka.ms/sentinel-NozomiNetworks-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Follow these steps to configure Nozomi Networks device for sending Alerts, Audit Logs, Health Logs log via syslog in CEF format:\n\n> 1. Log in to the Guardian console.\n\n> 2. Navigate to Administration->Data Integration, press +Add and select the Common Event Format (CEF) from the drop down\n\n> 3. Create New Endpoint using the appropriate host information and enable Alerts, Audit Logs, Health Logs for sending.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NozomiNetworks/Data%20Connectors/NozomiNetworksN2OS.json","true" -"CommonSecurityLog","NozomiNetworks","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NozomiNetworks","azuresentinel","azure-sentinel-solution-nozominetworks","2022-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","NozomiNetworksN2OSAma","Nozomi Networks","[Deprecated] Nozomi Networks N2OS via AMA","The [Nozomi Networks](https://www.nozominetworks.com/) data connector provides the capability to ingest Nozomi Networks Events into Microsoft Sentinel. Refer to the Nozomi Networks [PDF documentation](https://www.nozominetworks.com/resources/data-sheets-brochures-learning-guides/) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**NozomiNetworksEvents**](https://aka.ms/sentinel-NozomiNetworks-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Follow these steps to configure Nozomi Networks device for sending Alerts, Audit Logs, Health Logs log via syslog in CEF format:\n\n> 1. Log in to the Guardian console.\n\n> 2. Navigate to Administration->Data Integration, press +Add and select the Common Event Format (CEF) from the drop down\n\n> 3. Create New Endpoint using the appropriate host information and enable Alerts, Audit Logs, Health Logs for sending."", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NozomiNetworks/Data%20Connectors/template_NozomiNetworksN2OSAMA.json","true" -"","OSSEC","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OSSEC","azuresentinel","azure-sentinel-solution-ossec","2022-05-19","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"CommonSecurityLog","OSSEC","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OSSEC","azuresentinel","azure-sentinel-solution-ossec","2022-05-19","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","OSSEC","OSSEC","[Deprecated] OSSEC via Legacy Agent","OSSEC data connector provides the capability to ingest [OSSEC](https://www.ossec.net/) events into Microsoft Sentinel. Refer to [OSSEC documentation](https://www.ossec.net/docs) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias OSSEC and load the function code or click [here](https://aka.ms/sentinel-OSSECEvent-parser), on the second line of the query, enter the hostname(s) of your OSSEC device(s) and any other unique identifiers for the logstream. 
The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""[Follow these steps](https://www.ossec.net/docs/docs/manual/output/syslog-output.html) to configure OSSEC sending alerts via syslog.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OSSEC/Data%20Connectors/Connector_CEF_OSSEC.json","true" -"CommonSecurityLog","OSSEC","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OSSEC","azuresentinel","azure-sentinel-solution-ossec","2022-05-19","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","OSSECAma","OSSEC","[Deprecated] OSSEC via AMA","OSSEC data connector provides the capability to ingest [OSSEC](https://www.ossec.net/) events into Microsoft Sentinel. Refer to [OSSEC documentation](https://www.ossec.net/docs) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias OSSEC and load the function code or click [here](https://aka.ms/sentinel-OSSECEvent-parser), on the second line of the query, enter the hostname(s) of your OSSEC device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. 
Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""[Follow these steps](https://www.ossec.net/docs/docs/manual/output/syslog-output.html) to configure OSSEC sending alerts via syslog."", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OSSEC/Data%20Connectors/template_OSSECAMA.json","true" -"","Obsidian Datasharing","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Obsidian%20Datasharing","391c3d87-edc8-4f72-a719-825c022b8eb4","azure-sentinel-solution-obsidian-activity-threat","2024-01-01","","","Obsidian Security","Partner","https://obsidiansecurity.com/contact","","domains","","","","","","","false","","false" -"ObsidianActivity_CL","Obsidian Datasharing","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Obsidian%20Datasharing","391c3d87-edc8-4f72-a719-825c022b8eb4","azure-sentinel-solution-obsidian-activity-threat","2024-01-01","","","Obsidian Security","Partner","https://obsidiansecurity.com/contact","","domains","ObsidianDatasharing","Obsidian Security","Obsidian Datasharing Connector","The Obsidian Datasharing connector provides the capability to read raw event data from Obsidian Datasharing in Microsoft Sentinel.","[{""title"": ""1. 
Create ARM Resources and Provide the Required Permissions"", ""description"": ""This connector reads data from the tables that Obsidian Datasharing uses in a Microsoft Analytics Workspace, if the data forwarding option is enabled in Obsidian Datasharing then raw event data is sent to the Microsoft Sentinel Ingestion API."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated Configuration and Secure Data Ingestion with Entra Application \nClicking on \""Deploy\"" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR). \nIt will then create an Entra application, link the DCR to it, and set the entered secret in the application. This setup enables data to be sent securely to the DCR using an Entra token.""}}, {""parameters"": {""label"": ""Deploy Obsidian Datasharing connector resources"", ""applicationDisplayName"": ""Obsidian Datasharing Connector Application""}, ""type"": ""DeployPushConnectorButton""}]}, {""title"": ""2. 
Push your logs into the workspace"", ""description"": ""Use the following parameters to configure the your machine to send the logs to the workspace."", ""instructions"": [{""parameters"": {""label"": ""Tenant ID (Directory ID)"", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the App Registration Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the App Registration Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Endpoint Uri"", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the Data Collection Endpoint Uri""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Rule Immutable ID"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the Data Collection Rule Immutable ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Activity Stream Name"", ""value"": ""Custom-ObsidianActivity_CL""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Threat Stream Name"", ""value"": ""Custom-ObsidianThreat_CL""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in Microsoft Entra ID. 
Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rule (DCR). Typically requires Azure RBAC Owner or User Access Administrator role""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Obsidian%20Datasharing/Data%20Connectors/ObsidianDatasharing_CCP/ObsidianDatasharing_ConnectorDefinition.json","true" -"ObsidianThreat_CL","Obsidian Datasharing","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Obsidian%20Datasharing","391c3d87-edc8-4f72-a719-825c022b8eb4","azure-sentinel-solution-obsidian-activity-threat","2024-01-01","","","Obsidian Security","Partner","https://obsidiansecurity.com/contact","","domains","ObsidianDatasharing","Obsidian Security","Obsidian Datasharing Connector","The Obsidian Datasharing connector provides the capability to read raw event data from Obsidian Datasharing in Microsoft Sentinel.","[{""title"": ""1. Create ARM Resources and Provide the Required Permissions"", ""description"": ""This connector reads data from the tables that Obsidian Datasharing uses in a Microsoft Analytics Workspace, if the data forwarding option is enabled in Obsidian Datasharing then raw event data is sent to the Microsoft Sentinel Ingestion API."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated Configuration and Secure Data Ingestion with Entra Application \nClicking on \""Deploy\"" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR). \nIt will then create an Entra application, link the DCR to it, and set the entered secret in the application. 
This setup enables data to be sent securely to the DCR using an Entra token.""}}, {""parameters"": {""label"": ""Deploy Obsidian Datasharing connector resources"", ""applicationDisplayName"": ""Obsidian Datasharing Connector Application""}, ""type"": ""DeployPushConnectorButton""}]}, {""title"": ""2. Push your logs into the workspace"", ""description"": ""Use the following parameters to configure the your machine to send the logs to the workspace."", ""instructions"": [{""parameters"": {""label"": ""Tenant ID (Directory ID)"", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the App Registration Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the App Registration Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Endpoint Uri"", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the Data Collection Endpoint Uri""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Rule Immutable ID"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the Data Collection Rule Immutable ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Activity Stream Name"", ""value"": ""Custom-ObsidianActivity_CL""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Threat Stream Name"", ""value"": ""Custom-ObsidianThreat_CL""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": 
{""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rule (DCR). Typically requires Azure RBAC Owner or User Access Administrator role""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Obsidian%20Datasharing/Data%20Connectors/ObsidianDatasharing_CCP/ObsidianDatasharing_ConnectorDefinition.json","true" -"","Okta Single Sign-On","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On","azuresentinel","azure-sentinel-solution-okta","2022-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"Okta_CL","Okta Single Sign-On","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On","azuresentinel","azure-sentinel-solution-okta","2022-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OktaSSO","Okta","Okta Single Sign-On","The [Okta Single Sign-On (SSO)](https://www.okta.com/products/single-sign-on/) connector provides the capability to ingest audit and event logs from the Okta API into Microsoft Sentinel. The connector provides visibility into these log types in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Okta SSO to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated, if you have previously deployed an earlier version, and want to update, please delete the existing Okta Azure Function before redeploying this version.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Okta SSO API**\n\n [Follow these instructions](https://developer.okta.com/docs/guides/create-an-api-token/create-the-token/) to create an API Token.""}, {""title"": """", ""description"": ""**Note** - For more information on the rate limit restrictions enforced by Okta, please refer to the **[documentation](https://developer.okta.com/docs/reference/rl-global-mgmt/)**.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Okta SSO connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Okta SSO API Authorization Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""This method provides an automated deployment of 
the Okta SSO connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentineloktaazuredeployv2-solution) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentineloktaazuredeployv2-solution-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Token** and **URI**. \n - Use the following schema for the `uri` value: `https:///api/v1/logs?since=` Replace `` with your domain. [Click here](https://developer.okta.com/docs/reference/api-overview/#url-namespace) for further details on how to identify your Okta domain namespace. There is no need to add a time value to the URI, the Function App will dynamically append the inital start time of logs to UTC 0:00 for the current UTC date as time value to the URI in the proper format. \n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Okta SSO connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentineloktaazurefunctioncodev2) file. Extract archive to your local development computer.\n2. 
Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following five (5) application settings individually, with their respective string values (case-sensitive): \n\t\tapiToken\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\tlogAnalyticsUri (optional)\n - Use the following schema for the `uri` value: `https:///api/v1/logs?since=` Replace `` with your domain. [Click here](https://developer.okta.com/docs/reference/api-overview/#url-namespace) for further details on how to identify your Okta domain namespace. There is no need to add a time value to the URI, the Function App will dynamically append the inital start time of logs to UTC 0:00 for the current UTC date as time value to the URI in the proper format.\n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: https://.ods.opinsights.azure.us. \n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Okta API Token"", ""description"": ""An Okta API Token is required. 
See the documentation to learn more about the [Okta System Log API](https://developer.okta.com/docs/reference/api/system-log/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On/Data%20Connectors/OktaSingleSign-On/Connector_REST_API_FunctionApp_Okta.json","true" -"OktaNativePoller_CL","Okta Single Sign-On","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On","azuresentinel","azure-sentinel-solution-okta","2022-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OktaSSO_Polling","Okta","Okta Single Sign-On (Polling CCP)","The [Okta Single Sign-On (SSO)](https://www.okta.com/products/single-sign-on/) connector provides the capability to ingest audit and event logs from the Okta API into Microsoft Sentinel. The connector provides visibility into these log types in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","[{""title"": ""Connect OktaSSO"", ""description"": ""Please insert your APIKey"", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Domain Name"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{placeHolder1}}"", ""placeHolderValue"": """"}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On/Data%20Connectors/OktaNativePollerConnector/azuredeploy_Okta_native_poller_connector.json","true" -"OktaV2_CL","Okta Single 
Sign-On","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On","azuresentinel","azure-sentinel-solution-okta","2022-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OktaSSOv2","Microsoft","Okta Single Sign-On","The [Okta Single Sign-On (SSO)](https://www.okta.com/products/single-sign-on/) data connector provides the capability to ingest audit and event logs from the Okta System Log API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform and uses the Okta System Log API to fetch the events. The connector supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security event data into a custom columns so that queries don't need to parse it again, thus resulting in better performance.","[{""description"": ""To enable the Okta Single Sign-On for Microsoft Sentinel, provide the required information below and click on Connect.\n>"", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Endpoint"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add domain"", ""title"": ""Add domain"", ""subtitle"": ""Add domain"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Okta Domain Name"", ""placeholder"": ""Okta Domain Name (e.g., myDomain.okta.com)"", ""type"": ""text"", ""name"": ""domainname""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""API Key"", ""type"": ""password"", ""name"": ""apikey""}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write 
permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Okta API Token"", ""description"": ""An Okta API token. Follow the [following instructions](https://developer.okta.com/docs/guides/create-an-api-token/main/) to create an API token. See the [documentation](https://developer.okta.com/docs/reference/api/system-log/) to learn more about Okta System Log API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On/Data%20Connectors/OktaNativePollerConnectorV2/OktaSSOv2_DataConnectorDefinition.json","true" -"Okta_CL","Okta Single Sign-On","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On","azuresentinel","azure-sentinel-solution-okta","2022-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OktaSSOv2","Microsoft","Okta Single Sign-On","The [Okta Single Sign-On (SSO)](https://www.okta.com/products/single-sign-on/) data connector provides the capability to ingest audit and event logs from the Okta System Log API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform and uses the Okta System Log API to fetch the events. 
The connector supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security event data into a custom columns so that queries don't need to parse it again, thus resulting in better performance.","[{""description"": ""To enable the Okta Single Sign-On for Microsoft Sentinel, provide the required information below and click on Connect.\n>"", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Endpoint"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add domain"", ""title"": ""Add domain"", ""subtitle"": ""Add domain"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Okta Domain Name"", ""placeholder"": ""Okta Domain Name (e.g., myDomain.okta.com)"", ""type"": ""text"", ""name"": ""domainname""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""API Key"", ""type"": ""password"", ""name"": ""apikey""}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Okta API Token"", ""description"": ""An Okta API token. 
Follow the [following instructions](https://developer.okta.com/docs/guides/create-an-api-token/main/) to create an API token. See the [documentation](https://developer.okta.com/docs/reference/api/system-log/) to learn more about Okta System Log API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On/Data%20Connectors/OktaNativePollerConnectorV2/OktaSSOv2_DataConnectorDefinition.json","true" -"signIns","Okta Single Sign-On","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On","azuresentinel","azure-sentinel-solution-okta","2022-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OktaSSOv2","Microsoft","Okta Single Sign-On (Preview)","The [Okta Single Sign-On (SSO)](https://www.okta.com/products/single-sign-on/) data connector provides the capability to ingest audit and event logs from the Okta System Log API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform and uses the Okta System Log API to fetch the events. 
The connector supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security event data into a custom columns so that queries don't need to parse it again, thus resulting in better performance.","[{""description"": ""To enable the Okta Single Sign-On for Microsoft Sentinel, provide the required information below and click on Connect.\n>"", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Endpoint"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add domain"", ""title"": ""Add domain"", ""subtitle"": ""Add domain"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Okta Domain Name"", ""placeholder"": ""Okta Domain Name (e.g., myDomain.okta.com)"", ""type"": ""text"", ""name"": ""domainname""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""API Key"", ""type"": ""password"", ""name"": ""apikey""}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Okta API Token"", ""description"": ""An Okta API token. 
Follow the [following instructions](https://developer.okta.com/docs/guides/create-an-api-token/main/) to create an API token. See the [documentation](https://developer.okta.com/docs/reference/api/system-log/) to learn more about Okta System Log API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On/Data%20Connectors/OktaNativePollerConnectorV2/azuredeploy_Okta_native_poller_connector_v2.json","true" -"OktaV2_CL","Okta Single Sign-On","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On","azuresentinel","azure-sentinel-solution-okta","2022-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OktaSingleSignOn(usingAzureFunctions)","Okta","Okta Single Sign-On (using Azure Functions)","The [Okta Single Sign-On (SSO)](https://www.okta.com/products/single-sign-on/) connector provides the capability to ingest audit and event logs from the Okta API into Microsoft Sentinel. The connector provides visibility into these log types in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","[{""description"": "">**NOTE:** This connector uses Azure Functions to connect to Okta SSO to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""description"": "">**NOTE:** This connector has been updated, if you have previously deployed an earlier version, and want to update, please delete the existing Okta Azure Function before redeploying this version.""}, {""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**STEP 1 - Configuration steps for the Okta SSO API**\n\n [Follow these instructions](https://developer.okta.com/docs/guides/create-an-api-token/create-the-token/) to create an API Token.""}, {""description"": ""**Note** - For more information on the rate limit restrictions enforced by Okta, please refer to the **[documentation](https://developer.okta.com/docs/reference/rl-global-mgmt/)**.""}, {""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Okta SSO connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Okta SSO API Authorization Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""This method provides an automated deployment of the Okta SSO connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentineloktaazuredeployv2-solution)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Token** and **URI**. \n - Use the following schema for the `uri` value: `https:///api/v1/logs?since=` Replace `` with your domain. 
[Click here](https://developer.okta.com/docs/reference/api-overview/#url-namespace) for further details on how to identify your Okta domain namespace. There is no need to add a time value to the URI, the Function App will dynamically append the inital start time of logs to UTC 0:00 for the current UTC date as time value to the URI in the proper format. \n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Okta SSO connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentineloktaazurefunctioncodev2) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following five (5) application settings individually, with their respective string values (case-sensitive): \n\t\tapiToken\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\tlogAnalyticsUri (optional)\n - Use the following schema for the `uri` value: `https:///api/v1/logs?since=` Replace `` with your domain. [Click here](https://developer.okta.com/docs/reference/api-overview/#url-namespace) for further details on how to identify your Okta domain namespace. There is no need to add a time value to the URI, the Function App will dynamically append the inital start time of logs to UTC 0:00 for the current UTC date as time value to the URI in the proper format.\n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: https://.ods.opinsights.azure.us. \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Okta API Token"", ""description"": ""An Okta API Token is required. See the documentation to learn more about the [Okta System Log API](https://developer.okta.com/docs/reference/api/system-log/).""}]}","true","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On/Data%20Connectors/OktaNativePollerConnectorV2/azuredeploy_Okta_native_poller_connector_v2.json","true" -"Okta_CL","Okta Single Sign-On","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On","azuresentinel","azure-sentinel-solution-okta","2022-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OktaSingleSignOn(usingAzureFunctions)","Okta","Okta Single Sign-On (using Azure Functions)","The [Okta Single Sign-On (SSO)](https://www.okta.com/products/single-sign-on/) connector provides the capability to ingest audit and event logs from the Okta API into Microsoft Sentinel. The connector provides visibility into these log types in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","[{""description"": "">**NOTE:** This connector uses Azure Functions to connect to Okta SSO to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""description"": "">**NOTE:** This connector has been updated, if you have previously deployed an earlier version, and want to update, please delete the existing Okta Azure Function before redeploying this version.""}, {""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**STEP 1 - Configuration steps for the Okta SSO API**\n\n [Follow these instructions](https://developer.okta.com/docs/guides/create-an-api-token/create-the-token/) to create an API Token.""}, {""description"": ""**Note** - For more information on the rate limit restrictions enforced by Okta, please refer to the **[documentation](https://developer.okta.com/docs/reference/rl-global-mgmt/)**.""}, {""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Okta SSO connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Okta SSO API Authorization Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""This method provides an automated deployment of the Okta SSO connector using an ARM Tempate.\n\n1. 
Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentineloktaazuredeployv2-solution)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Token** and **URI**. \n - Use the following schema for the `uri` value: `https:///api/v1/logs?since=` Replace `` with your domain. [Click here](https://developer.okta.com/docs/reference/api-overview/#url-namespace) for further details on how to identify your Okta domain namespace. There is no need to add a time value to the URI, the Function App will dynamically append the inital start time of logs to UTC 0:00 for the current UTC date as time value to the URI in the proper format. \n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Okta SSO connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentineloktaazurefunctioncodev2) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following five (5) application settings individually, with their respective string values (case-sensitive): \n\t\tapiToken\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\tlogAnalyticsUri (optional)\n - Use the following schema for the `uri` value: `https:///api/v1/logs?since=` Replace `` with your domain. [Click here](https://developer.okta.com/docs/reference/api-overview/#url-namespace) for further details on how to identify your Okta domain namespace. There is no need to add a time value to the URI, the Function App will dynamically append the inital start time of logs to UTC 0:00 for the current UTC date as time value to the URI in the proper format.\n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: https://.ods.opinsights.azure.us. \n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Okta API Token"", ""description"": ""An Okta API Token is required. 
See the documentation to learn more about the [Okta System Log API](https://developer.okta.com/docs/reference/api/system-log/).""}]}","true","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On/Data%20Connectors/OktaNativePollerConnectorV2/azuredeploy_Okta_native_poller_connector_v2.json","true" -"signIns","Okta Single Sign-On","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On","azuresentinel","azure-sentinel-solution-okta","2022-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OktaSingleSignOn(usingAzureFunctions)","Okta","Okta Single Sign-On (using Azure Functions)","The [Okta Single Sign-On (SSO)](https://www.okta.com/products/single-sign-on/) connector provides the capability to ingest audit and event logs from the Okta API into Microsoft Sentinel. The connector provides visibility into these log types in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","[{""description"": "">**NOTE:** This connector uses Azure Functions to connect to Okta SSO to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""description"": "">**NOTE:** This connector has been updated, if you have previously deployed an earlier version, and want to update, please delete the existing Okta Azure Function before redeploying this version.""}, {""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**STEP 1 - Configuration steps for the Okta SSO API**\n\n [Follow these instructions](https://developer.okta.com/docs/guides/create-an-api-token/create-the-token/) to create an API Token.""}, {""description"": ""**Note** - For more information on the rate limit restrictions enforced by Okta, please refer to the **[documentation](https://developer.okta.com/docs/reference/rl-global-mgmt/)**.""}, {""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Okta SSO connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Okta SSO API Authorization Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""This method provides an automated deployment of the Okta SSO connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentineloktaazuredeployv2-solution)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Token** and **URI**. \n - Use the following schema for the `uri` value: `https:///api/v1/logs?since=` Replace `` with your domain. 
[Click here](https://developer.okta.com/docs/reference/api-overview/#url-namespace) for further details on how to identify your Okta domain namespace. There is no need to add a time value to the URI, the Function App will dynamically append the inital start time of logs to UTC 0:00 for the current UTC date as time value to the URI in the proper format. \n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Okta SSO connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentineloktaazurefunctioncodev2) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following five (5) application settings individually, with their respective string values (case-sensitive): \n\t\tapiToken\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\tlogAnalyticsUri (optional)\n - Use the following schema for the `uri` value: `https:///api/v1/logs?since=` Replace `` with your domain. [Click here](https://developer.okta.com/docs/reference/api-overview/#url-namespace) for further details on how to identify your Okta domain namespace. There is no need to add a time value to the URI, the Function App will dynamically append the inital start time of logs to UTC 0:00 for the current UTC date as time value to the URI in the proper format.\n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: https://.ods.opinsights.azure.us. \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Okta API Token"", ""description"": ""An Okta API Token is required. See the documentation to learn more about the [Okta System Log API](https://developer.okta.com/docs/reference/api/system-log/).""}]}","true","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On/Data%20Connectors/OktaNativePollerConnectorV2/azuredeploy_Okta_native_poller_connector_v2.json","true" -"","Onapsis Defend","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Onapsis%20Defend","onapsis","azure-sentinel-solution-onapsis-defend","2025-07-17","2025-07-17","","Onapsis","Partner","https://onapsis.com/support/","","domains","","","","","","","false","","false" -"Onapsis_Defend_CL","Onapsis Defend","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Onapsis%20Defend","onapsis","azure-sentinel-solution-onapsis-defend","2025-07-17","2025-07-17","","Onapsis","Partner","https://onapsis.com/support/","","domains","Onapsis","Onapsis Platform","Onapsis Defend Integration","Onapsis Defend Integration is aimed at forwarding alerts and logs collected and detected by Onapsis Platform into Microsoft Sentinel SIEM","[{""title"": ""1. Create ARM Resources and Provide the Required Permissions"", ""description"": ""We will create data collection rule (DCR) and data collection endpoint (DCE) resources. 
We will also create a Microsoft Entra app registration and assign the required permissions to it."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated deployment of Azure resources\nClicking on \""Deploy push connector resources\"" will trigger the creation of DCR and DCE resources.\nIt will then create a Microsoft Entra app registration with client secret and grant permissions on the DCR. This setup enables data to be sent securely to the DCR using a OAuth v2 client credentials.""}}, {""parameters"": {""label"": ""Deploy push connector resources"", ""applicationDisplayName"": ""Onapsis Defend Integration push to Microsoft Sentinel""}, ""type"": ""DeployPushConnectorButton_test""}]}, {""title"": ""2. Maintain the data collection endpoint details and authentication info in Onapsis Defend Integration"", ""description"": ""Share the data collection endpoint URL and authentication info with the Onapsis Defend Integration administrator to configure the Onapsis Defend Integration to send data to the data collection endpoint."", ""instructions"": [{""parameters"": {""label"": ""Use this value to configure as Tenant ID in the LogIngestionAPI credential."", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra Application Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the Application Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Use this value to configure the LogsIngestionURL parameter when deploying the IFlow."", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the DCE URI""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""DCR Immutable ID"", ""fillWith"": 
[""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the DCR ID""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rules. Typically requires Azure RBAC Owner or User Access Administrator role.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Onapsis%20Defend/Data%20Connectors/Onapsis.json","true" -"Onapsis_Defend_CL","Onapsis Defend","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Onapsis%20Defend","onapsis","azure-sentinel-solution-onapsis-defend","2025-07-17","2025-07-17","","Onapsis","Partner","https://onapsis.com/support/","","domains","Onapsis","Onapsis SE","Onapsis Defend: Integrate Unmatched SAP Threat Detection & Intel with Microsoft Sentinel","Empower security teams with deep visibility into unique exploit, zero-day, and threat actor activity; suspicious user or insider behavior; sensitive data downloads; security control violations; and more - all enriched by the SAP experts at Onapsis.","[{""title"": ""1. 
Create ARM Resources and Provide the Required Permissions"", ""description"": ""We will create data collection rule (DCR) and data collection endpoint (DCE) resources. We will also create a Microsoft Entra app registration and assign the required permissions to it."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated deployment of Azure resources\nClicking on \""Deploy push connector resources\"" will trigger the creation of DCR and DCE resources.\nIt will then create a Microsoft Entra app registration with client secret and grant permissions on the DCR. This setup enables data to be sent securely to the DCR using a OAuth v2 client credentials.""}}, {""parameters"": {""label"": ""Deploy push connector resources"", ""applicationDisplayName"": ""Onapsis Defend Integration push to Microsoft Sentinel""}, ""type"": ""DeployPushConnectorButton_test""}]}, {""title"": ""2. Maintain the data collection endpoint details and authentication info in Onapsis Defend Integration"", ""description"": ""Share the data collection endpoint URL and authentication info with the Onapsis Defend Integration administrator to configure the Onapsis Defend Integration to send data to the data collection endpoint."", ""instructions"": [{""parameters"": {""label"": ""Tenant ID | Use this value to configure as Tenant ID"", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra Application ID | Use this value for the Client ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra Application Secret | Use this value for the Token"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the Application Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""LogIngestionURL | Use this value for the URL parameter"", ""fillWith"": 
[""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the DCE URI""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""DCR Immutable ID | Use this value for the DCR_ID parameter"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the DCR ID""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rules. 
Typically requires Azure RBAC Owner or User Access Administrator role.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Onapsis%20Defend/Data%20Connectors/Onapsis_PUSH_CCP/Onapsis_connectorDefinition.json","true" -"","Onapsis Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Onapsis%20Platform","onapsis","onapsis_mss","2022-05-11","","","Onapsis","Partner","https://onapsis.com/company/contact-us","","domains","","","","","","","false","","false" -"CommonSecurityLog","Onapsis Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Onapsis%20Platform","onapsis","onapsis_mss","2022-05-11","","","Onapsis","Partner","https://onapsis.com/company/contact-us","","domains","OnapsisPlatform","Onapsis","[Deprecated] Onapsis Platform","The Onapsis Connector allows you to export the alarms triggered in the Onapsis Platform into Microsoft Sentinel in real-time. This gives you the ability to monitor the activity on your SAP systems, identify incidents and respond to them quickly.","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your Onapsis Console and Microsoft Sentinel. This machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. 
Make sure that you have Python on your machine using the following command: python --version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Refer to the Onapsis in-product help to set up log forwarding to the Syslog agent.\n\n> 1. Go to Setup > Third-party integrations > Defend Alarms and follow the instructions for Microsoft Sentinel.\n\n> 2. Make sure your Onapsis Console can reach the proxy machine where the agent is installed - logs should be sent to port 514 using TCP.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python --version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. 
Create Onapsis lookup function for incident enrichment"", ""description"": ""[Follow these steps to get this Kusto function](https://aka.ms/sentinel-Onapsis-parser)""}, {""title"": ""5. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Onapsis%20Platform/Data%20Connectors/OnapsisPlatform.json","true" -"","OneIdentity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneIdentity","quest","oneidentity_mss","2022-05-02","","","One Identity","Partner","https://support.oneidentity.com/","","domains","","","","","","","false","","false" -"CommonSecurityLog","OneIdentity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneIdentity","quest","oneidentity_mss","2022-05-02","","","One Identity","Partner","https://support.oneidentity.com/","","domains","OneIdentity","One Identity LLC.","One Identity Safeguard","The One Identity Safeguard (CEF) Sentinel data connector enhances the standard Common Event Format (CEF) connector with Safeguard for Privileged Sessions-specific dashboards. 
Use this connector to easily start utilizing the events generated by your device for visualization, alerts, investigations and more.","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python --version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward One Identity SafeGuard logs to Syslog agent"", ""description"": ""Follow the [instructions in the Safeguard for Privileged Sessions Administration Guide](https://aka.ms/sentinel-cef-oneidentity-forwarding) in section \""Universal SIEM Forwarder\"". 
Make sure to select the format \""CEF\"".\n\u00a0\nNote that by default there is no TLS security set up in the syslog on the Linux machine.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python --version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneIdentity/Data%20Connectors/OneIdentity.JSON","true" -"","OneLoginIAM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM","azuresentinel","azure-sentinel-solution-oneloginiam","2022-08-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"OneLoginEventsV2_CL","OneLoginIAM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM","azuresentinel","azure-sentinel-solution-oneloginiam","2022-08-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OneLogin","OneLogin","[DEPRECATED] OneLogin IAM Platform","The [OneLogin](https://www.onelogin.com/) data connector provides the capability to ingest common OneLogin IAM Platform events into Microsoft Sentinel through Webhooks. The OneLogin Event Webhook API which is also known as the Event Broadcaster will send batches of events in near real-time to an endpoint that you specify. When a change occurs in the OneLogin, an HTTPS POST request with event information is sent to a callback data connector URL. Refer to [Webhooks documentation](https://developers.onelogin.com/api-docs/1/events/webhooks) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This data connector uses Azure Functions based on HTTP Trigger for waiting POST requests with logs to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**OneLogin**](https://aka.ms/sentinel-OneLogin-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the OneLogin**\n\n Follow the [instructions](https://onelogin.service-now.com/kb_view_customer.do?sysparm_article=KB0010469) to configure Webhooks.\n\n1. Generate the **OneLoginBearerToken** according to your password policy.\n2. Set Custom Header in the format: Authorization: Bearer .\n3. 
Use JSON Array Logs Format.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the OneLogin data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the OneLogin data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-OneLogin-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **OneLoginBearerToken** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n6. 
After deploying open Function App page, select your app, go to the **Functions** and click **Get Function Url** copy it and follow p.7 from STEP 1.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the OneLogin data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-OneLogin-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select ** New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tOneLoginBearerToken\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Webhooks Credentials/permissions"", ""description"": ""**OneLoginBearerToken**, **Callback URL** are required for working Webhooks. See the documentation to learn more about [configuring Webhooks](https://onelogin.service-now.com/kb_view_customer.do?sysparm_article=KB0010469).You need to generate **OneLoginBearerToken** according to your security requirements and use it in **Custom Headers** section in format: Authorization: Bearer **OneLoginBearerToken**. 
Logs Format: JSON Array.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM/Data%20Connectors/OneLogin_Webhooks_FunctionApp.json","true" -"OneLoginUsersV2_CL","OneLoginIAM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM","azuresentinel","azure-sentinel-solution-oneloginiam","2022-08-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OneLogin","OneLogin","[DEPRECATED] OneLogin IAM Platform","The [OneLogin](https://www.onelogin.com/) data connector provides the capability to ingest common OneLogin IAM Platform events into Microsoft Sentinel through Webhooks. The OneLogin Event Webhook API which is also known as the Event Broadcaster will send batches of events in near real-time to an endpoint that you specify. When a change occurs in the OneLogin, an HTTPS POST request with event information is sent to a callback data connector URL. Refer to [Webhooks documentation](https://developers.onelogin.com/api-docs/1/events/webhooks) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This data connector uses Azure Functions based on HTTP Trigger for waiting POST requests with logs to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**OneLogin**](https://aka.ms/sentinel-OneLogin-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the OneLogin**\n\n Follow the [instructions](https://onelogin.service-now.com/kb_view_customer.do?sysparm_article=KB0010469) to configure Webhooks.\n\n1. Generate the **OneLoginBearerToken** according to your password policy.\n2. Set Custom Header in the format: Authorization: Bearer .\n3. 
Use JSON Array Logs Format.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the OneLogin data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the OneLogin data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-OneLogin-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **OneLoginBearerToken** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n6. 
After deploying open Function App page, select your app, go to the **Functions** and click **Get Function Url** copy it and follow p.7 from STEP 1.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the OneLogin data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-OneLogin-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select ** New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tOneLoginBearerToken\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Webhooks Credentials/permissions"", ""description"": ""**OneLoginBearerToken**, **Callback URL** are required for working Webhooks. See the documentation to learn more about [configuring Webhooks](https://onelogin.service-now.com/kb_view_customer.do?sysparm_article=KB0010469).You need to generate **OneLoginBearerToken** according to your security requirements and use it in **Custom Headers** section in format: Authorization: Bearer **OneLoginBearerToken**. 
Logs Format: JSON Array.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM/Data%20Connectors/OneLogin_Webhooks_FunctionApp.json","true" -"OneLogin_CL","OneLoginIAM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM","azuresentinel","azure-sentinel-solution-oneloginiam","2022-08-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OneLogin","OneLogin","[DEPRECATED] OneLogin IAM Platform","The [OneLogin](https://www.onelogin.com/) data connector provides the capability to ingest common OneLogin IAM Platform events into Microsoft Sentinel through Webhooks. The OneLogin Event Webhook API which is also known as the Event Broadcaster will send batches of events in near real-time to an endpoint that you specify. When a change occurs in the OneLogin, an HTTPS POST request with event information is sent to a callback data connector URL. Refer to [Webhooks documentation](https://developers.onelogin.com/api-docs/1/events/webhooks) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This data connector uses Azure Functions based on HTTP Trigger for waiting POST requests with logs to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**OneLogin**](https://aka.ms/sentinel-OneLogin-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the OneLogin**\n\n Follow the [instructions](https://onelogin.service-now.com/kb_view_customer.do?sysparm_article=KB0010469) to configure Webhooks.\n\n1. Generate the **OneLoginBearerToken** according to your password policy.\n2. Set Custom Header in the format: Authorization: Bearer .\n3. 
Use JSON Array Logs Format.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the OneLogin data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the OneLogin data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-OneLogin-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **OneLoginBearerToken** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n6. 
After deploying open Function App page, select your app, go to the **Functions** and click **Get Function Url** copy it and follow p.7 from STEP 1.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the OneLogin data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-OneLogin-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select ** New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tOneLoginBearerToken\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Webhooks Credentials/permissions"", ""description"": ""**OneLoginBearerToken**, **Callback URL** are required for working Webhooks. See the documentation to learn more about [configuring Webhooks](https://onelogin.service-now.com/kb_view_customer.do?sysparm_article=KB0010469).You need to generate **OneLoginBearerToken** according to your security requirements and use it in **Custom Headers** section in format: Authorization: Bearer **OneLoginBearerToken**. 
Logs Format: JSON Array.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM/Data%20Connectors/OneLogin_Webhooks_FunctionApp.json","true" -"OneLoginEventsV2_CL","OneLoginIAM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM","azuresentinel","azure-sentinel-solution-oneloginiam","2022-08-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OneLoginIAMLogsCCPDefinition","Microsoft","OneLogin IAM Platform (via Codeless Connector Framework)","The [OneLogin](https://www.onelogin.com/) data connector provides the capability to ingest common OneLogin IAM Platform events into Microsoft Sentinel through REST API by using OneLogin [Events API](https://developers.onelogin.com/api-docs/1/events/get-events) and OneLogin [Users API](https://developers.onelogin.com/api-docs/1/users/get-users). The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""description"": ""To ingest data from OneLogin IAM to Microsoft Sentinel, you have to click on Add Domain button below then you get a pop up to fill the details, provide the required information and click on Connect. 
You can see the domain endpoints connected in the grid.\n>"", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Endpoint"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add domain"", ""title"": ""Add domain"", ""subtitle"": ""Add domain"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""OneLogin Domain"", ""placeholder"": ""Enter your Company's OneLogin Domain"", ""type"": ""text"", ""name"": ""domainName"", ""required"": true, ""description"": ""For example, if your OneLogin Domain is test.onelogin.com, you need to enter only test in the above field.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client ID"", ""placeholder"": ""Enter your Client ID"", ""type"": ""text"", ""name"": ""cId"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client Secret"", ""placeholder"": ""Enter your Client Secret"", ""type"": ""password"", ""name"": ""cSec"", ""required"": true}}]}]}}], ""title"": ""Connect OneLogin IAM Platform to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""OneLogin IAM API Credentials"", ""description"": ""To create API Credentials follow the document link provided here, [Click Here](https://developers.onelogin.com/api-docs/1/getting-started/working-with-api-credentials). \n Make sure to have an account type of either account owner or administrator to create the API credentials. 
\n Once you create the API Credentials you get your Client ID and Client Secret.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM/Data%20Connectors/OneLoginIAMLogs_ccp/OneLoginIAMLogs_ConnectorDefinition.json","true" -"OneLoginUsersV2_CL","OneLoginIAM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM","azuresentinel","azure-sentinel-solution-oneloginiam","2022-08-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OneLoginIAMLogsCCPDefinition","Microsoft","OneLogin IAM Platform (via Codeless Connector Framework)","The [OneLogin](https://www.onelogin.com/) data connector provides the capability to ingest common OneLogin IAM Platform events into Microsoft Sentinel through REST API by using OneLogin [Events API](https://developers.onelogin.com/api-docs/1/events/get-events) and OneLogin [Users API](https://developers.onelogin.com/api-docs/1/users/get-users). The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""description"": ""To ingest data from OneLogin IAM to Microsoft Sentinel, you have to click on Add Domain button below then you get a pop up to fill the details, provide the required information and click on Connect. 
You can see the domain endpoints connected in the grid.\n>"", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Endpoint"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add domain"", ""title"": ""Add domain"", ""subtitle"": ""Add domain"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""OneLogin Domain"", ""placeholder"": ""Enter your Company's OneLogin Domain"", ""type"": ""text"", ""name"": ""domainName"", ""required"": true, ""description"": ""For example, if your OneLogin Domain is test.onelogin.com, you need to enter only test in the above field.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client ID"", ""placeholder"": ""Enter your Client ID"", ""type"": ""text"", ""name"": ""cId"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client Secret"", ""placeholder"": ""Enter your Client Secret"", ""type"": ""password"", ""name"": ""cSec"", ""required"": true}}]}]}}], ""title"": ""Connect OneLogin IAM Platform to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""OneLogin IAM API Credentials"", ""description"": ""To create API Credentials follow the document link provided here, [Click Here](https://developers.onelogin.com/api-docs/1/getting-started/working-with-api-credentials). \n Make sure to have an account type of either account owner or administrator to create the API credentials. 
\n Once you create the API Credentials you get your Client ID and Client Secret.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM/Data%20Connectors/OneLoginIAMLogs_ccp/OneLoginIAMLogs_ConnectorDefinition.json","true" -"","OneTrust","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneTrust","onetrustllc1594047340198","azure-sentinel-solution-onetrust","2025-10-24","2025-10-24","","OneTrust, LLC","Partner","https://www.onetrust.com/support/","","domains","","","","","","","false","","false" -"OneTrustMetadataV3_CL","OneTrust","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneTrust","onetrustllc1594047340198","azure-sentinel-solution-onetrust","2025-10-24","2025-10-24","","OneTrust, LLC","Partner","https://www.onetrust.com/support/","","domains","OnetrustPush","OneTrust","OneTrust","The OneTrust connector for Microsoft Sentinel provides the capability to have near real time visibility into where sensitive data has been located or remediated across Google Cloud and other OneTrust supported data sources.","[{""title"": ""1. Create ARM Resources and Provide the Required Permissions"", ""description"": ""This connector reads data from the tables that OneTrust uses in a Microsoft Analytics Workspace. If OneTrust's data forwarding option is enabled then raw event data can be sent to the Microsoft Sentinel Ingestion API."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated Configuration and Secure Data Ingestion with Entra Application \nClicking on \""Deploy\"" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR). \nIt will then create an Entra application, link the DCR to it, and set the entered secret in the application. 
This setup enables data to be sent securely to the DCR using an Entra token.""}}, {""parameters"": {""label"": ""Deploy OneTrust connector resources"", ""applicationDisplayName"": ""OneTrust Connector Application""}, ""type"": ""DeployPushConnectorButton""}]}, {""title"": ""2. Push your logs into the workspace"", ""description"": ""Use the following parameters to configure your machine to send the logs to the workspace."", ""instructions"": [{""parameters"": {""label"": ""Tenant ID (Directory ID)"", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the App Registration Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the App Registration Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Endpoint Uri"", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the Data Collection Endpoint Uri""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Rule Immutable ID"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the Data Collection Rule Immutable ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""OneTrust Metadata Stream Name"", ""value"": ""Custom-OneTrustMetadataV3""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app 
registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rule (DCR). Typically requires Azure RBAC Owner or User Access Administrator role""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneTrust/Data%20Connectors/OneTrustLogs_CCF/OneTrustLogs_connectorDefinition.json","true" -"","Open Systems","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Open%20Systems","opensystemsag1582030008223","azure-sentinel-solution-osag","2025-05-12","","","Open Systems","Partner","https://www.open-systems.com/support","","domains","","","","","","","false","","false" -"OpenSystemsAuthenticationLogs_CL","Open Systems","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Open%20Systems","opensystemsag1582030008223","azure-sentinel-solution-osag","2025-05-12","","","Open Systems","Partner","https://www.open-systems.com/support","","domains","OpenSystems","Open Systems","Open Systems Data Connector","The Open Systems Logs API Microsoft Sentinel Connector provides the capability to ingest Open Systems logs into Microsoft Sentinel using Open Systems Logs API.","[{""title"": ""STEP 1: Prerequisites"", ""description"": ""Ensure you have the following information and permissions before proceeding: \n1. Open Systems Logs API endpoint and connection String. \n2. Service Principal credentials (Client ID, Client Secret, Object/Principal ID). \n3. Permissions to deploy Azure Container Apps, Managed Environments, Data Collection Rules (DCRs), Data Collection Endpoints (DCEs), and create Role Assignments (typically 'Contributor' role on the subscription or resource group).""}, {""title"": ""STEP 2: Deploy the Connector"", ""description"": ""Deploy the ARM template to set up the data processing resources, including the data collection rule and associated components.\n\n1. 
Click the **Deploy to Azure** button below. This will take you to the Azure portal.\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-OpenSystemsLogsAPI-azuredeploy)\n\n2. In the Azure portal, select your desired **Subscription**, **Resource Group**, and **Region**.\n3. Provide the required parameters, including those gathered in the prerequisites step (Open Systems Logs API details, Service Principal credentials, etc.), when prompted by the deployment wizard.\n4. Review the terms and click **Review + create**, then **Create** to start the deployment.""}, {""title"": ""STEP 3: Post-Deployment Verification"", ""description"": ""After successful deployment: \n1. Verify that the Azure Container App running the processor is in a 'Running' state. \n2. Check the `OpenSystemsZtnaLogs_CL`, `OpenSystemsFirewallLogs_CL`, `OpenSystemsAuthenticationLogs_CL`, and `OpenSystemsProxyLogs_CL` tables in your Log Analytics workspace for incoming data. It may take some time for logs to appear after initial setup. \n3. Use the sample queries provided in the 'Next Steps' tab of this data connector page to view and analyze your logs.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Container Apps, DCRs, and DCEs"", ""description"": ""Permissions to deploy Azure Container Apps, Managed Environments, Data Collection Rules (DCRs), and Data Collection Endpoints (DCEs) are required. This is typically covered by having the 'Contributor' role on the subscription or resource group.""}, {""name"": ""Role Assignment Permissions"", ""description"": ""Permissions to create role assignments (specifically 'Monitoring Metrics Publisher' on DCRs) are required for the deploying user or service principal.""}, {""name"": ""Required Credentials for ARM Template"", ""description"": ""During deployment, you will need to provide: Open Systems Logs API endpoint and connection string, and Service Principal credentials (Client ID, Client Secret, Object/Principal ID).""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Custom prerequisites if necessary, otherwise delete this customs tag"", ""description"": ""Description for any custom pre-requisites""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Open%20Systems/Data%20Connectors/OpenSystems.json","true" -"OpenSystemsFirewallLogs_CL","Open Systems","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Open%20Systems","opensystemsag1582030008223","azure-sentinel-solution-osag","2025-05-12","","","Open Systems","Partner","https://www.open-systems.com/support","","domains","OpenSystems","Open Systems","Open Systems Data Connector","The Open Systems Logs API Microsoft Sentinel Connector provides the capability to ingest Open Systems logs into Microsoft Sentinel using Open Systems Logs API.","[{""title"": ""STEP 1: Prerequisites"", ""description"": ""Ensure you have the following information and permissions before proceeding: \n1. Open Systems Logs API endpoint and connection String. \n2. Service Principal credentials (Client ID, Client Secret, Object/Principal ID). \n3. Permissions to deploy Azure Container Apps, Managed Environments, Data Collection Rules (DCRs), Data Collection Endpoints (DCEs), and create Role Assignments (typically 'Contributor' role on the subscription or resource group).""}, {""title"": ""STEP 2: Deploy the Connector"", ""description"": ""Deploy the ARM template to set up the data processing resources, including the data collection rule and associated components.\n\n1. Click the **Deploy to Azure** button below. This will take you to the Azure portal.\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-OpenSystemsLogsAPI-azuredeploy)\n\n2. In the Azure portal, select your desired **Subscription**, **Resource Group**, and **Region**.\n3. 
Provide the required parameters, including those gathered in the prerequisites step (Open Systems Logs API details, Service Principal credentials, etc.), when prompted by the deployment wizard.\n4. Review the terms and click **Review + create**, then **Create** to start the deployment.""}, {""title"": ""STEP 3: Post-Deployment Verification"", ""description"": ""After successful deployment: \n1. Verify that the Azure Container App running the processor is in a 'Running' state. \n2. Check the `OpenSystemsZtnaLogs_CL`, `OpenSystemsFirewallLogs_CL`, `OpenSystemsAuthenticationLogs_CL`, and `OpenSystemsProxyLogs_CL` tables in your Log Analytics workspace for incoming data. It may take some time for logs to appear after initial setup. \n3. Use the sample queries provided in the 'Next Steps' tab of this data connector page to view and analyze your logs.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Container Apps, DCRs, and DCEs"", ""description"": ""Permissions to deploy Azure Container Apps, Managed Environments, Data Collection Rules (DCRs), and Data Collection Endpoints (DCEs) are required. 
This is typically covered by having the 'Contributor' role on the subscription or resource group.""}, {""name"": ""Role Assignment Permissions"", ""description"": ""Permissions to create role assignments (specifically 'Monitoring Metrics Publisher' on DCRs) are required for the deploying user or service principal.""}, {""name"": ""Required Credentials for ARM Template"", ""description"": ""During deployment, you will need to provide: Open Systems Logs API endpoint and connection string, and Service Principal credentials (Client ID, Client Secret, Object/Principal ID).""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Custom prerequisites if necessary, otherwise delete this customs tag"", ""description"": ""Description for any custom pre-requisites""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Open%20Systems/Data%20Connectors/OpenSystems.json","true" -"OpenSystemsImAuthentication","Open Systems","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Open%20Systems","opensystemsag1582030008223","azure-sentinel-solution-osag","2025-05-12","","","Open Systems","Partner","https://www.open-systems.com/support","","domains","OpenSystems","Open Systems","Open Systems Data Connector","The Open Systems Logs API Microsoft Sentinel Connector provides the capability to ingest Open Systems logs into Microsoft Sentinel using Open Systems Logs API.","[{""title"": ""STEP 1: Prerequisites"", ""description"": ""Ensure you have the following information and permissions before proceeding: \n1. Open Systems Logs API endpoint and connection String. \n2. Service Principal credentials (Client ID, Client Secret, Object/Principal ID). \n3. 
Permissions to deploy Azure Container Apps, Managed Environments, Data Collection Rules (DCRs), Data Collection Endpoints (DCEs), and create Role Assignments (typically 'Contributor' role on the subscription or resource group).""}, {""title"": ""STEP 2: Deploy the Connector"", ""description"": ""Deploy the ARM template to set up the data processing resources, including the data collection rule and associated components.\n\n1. Click the **Deploy to Azure** button below. This will take you to the Azure portal.\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-OpenSystemsLogsAPI-azuredeploy)\n\n2. In the Azure portal, select your desired **Subscription**, **Resource Group**, and **Region**.\n3. Provide the required parameters, including those gathered in the prerequisites step (Open Systems Logs API details, Service Principal credentials, etc.), when prompted by the deployment wizard.\n4. Review the terms and click **Review + create**, then **Create** to start the deployment.""}, {""title"": ""STEP 3: Post-Deployment Verification"", ""description"": ""After successful deployment: \n1. Verify that the Azure Container App running the processor is in a 'Running' state. \n2. Check the `OpenSystemsZtnaLogs_CL`, `OpenSystemsFirewallLogs_CL`, `OpenSystemsAuthenticationLogs_CL`, and `OpenSystemsProxyLogs_CL` tables in your Log Analytics workspace for incoming data. It may take some time for logs to appear after initial setup. \n3. 
Use the sample queries provided in the 'Next Steps' tab of this data connector page to view and analyze your logs.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Container Apps, DCRs, and DCEs"", ""description"": ""Permissions to deploy Azure Container Apps, Managed Environments, Data Collection Rules (DCRs), and Data Collection Endpoints (DCEs) are required. This is typically covered by having the 'Contributor' role on the subscription or resource group.""}, {""name"": ""Role Assignment Permissions"", ""description"": ""Permissions to create role assignments (specifically 'Monitoring Metrics Publisher' on DCRs) are required for the deploying user or service principal.""}, {""name"": ""Required Credentials for ARM Template"", ""description"": ""During deployment, you will need to provide: Open Systems Logs API endpoint and connection string, and Service Principal credentials (Client ID, Client Secret, Object/Principal ID).""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Custom prerequisites if necessary, otherwise delete this customs tag"", ""description"": ""Description for any custom pre-requisites""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Open%20Systems/Data%20Connectors/OpenSystems.json","true" -"OpenSystemsImNetworkSessionFirewall","Open Systems","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Open%20Systems","opensystemsag1582030008223","azure-sentinel-solution-osag","2025-05-12","","","Open Systems","Partner","https://www.open-systems.com/support","","domains","OpenSystems","Open Systems","Open Systems Data Connector","The Open Systems Logs API Microsoft Sentinel Connector provides the capability to ingest Open Systems logs into Microsoft Sentinel using Open Systems Logs API.","[{""title"": ""STEP 1: Prerequisites"", ""description"": ""Ensure you have the following information and permissions before proceeding: \n1. Open Systems Logs API endpoint and connection String. \n2. Service Principal credentials (Client ID, Client Secret, Object/Principal ID). \n3. Permissions to deploy Azure Container Apps, Managed Environments, Data Collection Rules (DCRs), Data Collection Endpoints (DCEs), and create Role Assignments (typically 'Contributor' role on the subscription or resource group).""}, {""title"": ""STEP 2: Deploy the Connector"", ""description"": ""Deploy the ARM template to set up the data processing resources, including the data collection rule and associated components.\n\n1. Click the **Deploy to Azure** button below. This will take you to the Azure portal.\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-OpenSystemsLogsAPI-azuredeploy)\n\n2. In the Azure portal, select your desired **Subscription**, **Resource Group**, and **Region**.\n3. 
Provide the required parameters, including those gathered in the prerequisites step (Open Systems Logs API details, Service Principal credentials, etc.), when prompted by the deployment wizard.\n4. Review the terms and click **Review + create**, then **Create** to start the deployment.""}, {""title"": ""STEP 3: Post-Deployment Verification"", ""description"": ""After successful deployment: \n1. Verify that the Azure Container App running the processor is in a 'Running' state. \n2. Check the `OpenSystemsZtnaLogs_CL`, `OpenSystemsFirewallLogs_CL`, `OpenSystemsAuthenticationLogs_CL`, and `OpenSystemsProxyLogs_CL` tables in your Log Analytics workspace for incoming data. It may take some time for logs to appear after initial setup. \n3. Use the sample queries provided in the 'Next Steps' tab of this data connector page to view and analyze your logs.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Container Apps, DCRs, and DCEs"", ""description"": ""Permissions to deploy Azure Container Apps, Managed Environments, Data Collection Rules (DCRs), and Data Collection Endpoints (DCEs) are required. 
This is typically covered by having the 'Contributor' role on the subscription or resource group.""}, {""name"": ""Role Assignment Permissions"", ""description"": ""Permissions to create role assignments (specifically 'Monitoring Metrics Publisher' on DCRs) are required for the deploying user or service principal.""}, {""name"": ""Required Credentials for ARM Template"", ""description"": ""During deployment, you will need to provide: Open Systems Logs API endpoint and connection string, and Service Principal credentials (Client ID, Client Secret, Object/Principal ID).""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Custom prerequisites if necessary, otherwise delete this customs tag"", ""description"": ""Description for any custom pre-requisites""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Open%20Systems/Data%20Connectors/OpenSystems.json","true" -"OpenSystemsImNetworkSessionProxy","Open Systems","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Open%20Systems","opensystemsag1582030008223","azure-sentinel-solution-osag","2025-05-12","","","Open Systems","Partner","https://www.open-systems.com/support","","domains","OpenSystems","Open Systems","Open Systems Data Connector","The Open Systems Logs API Microsoft Sentinel Connector provides the capability to ingest Open Systems logs into Microsoft Sentinel using Open Systems Logs API.","[{""title"": ""STEP 1: Prerequisites"", ""description"": ""Ensure you have the following information and permissions before proceeding: \n1. Open Systems Logs API endpoint and connection String. \n2. Service Principal credentials (Client ID, Client Secret, Object/Principal ID). \n3. 
Permissions to deploy Azure Container Apps, Managed Environments, Data Collection Rules (DCRs), Data Collection Endpoints (DCEs), and create Role Assignments (typically 'Contributor' role on the subscription or resource group).""}, {""title"": ""STEP 2: Deploy the Connector"", ""description"": ""Deploy the ARM template to set up the data processing resources, including the data collection rule and associated components.\n\n1. Click the **Deploy to Azure** button below. This will take you to the Azure portal.\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-OpenSystemsLogsAPI-azuredeploy)\n\n2. In the Azure portal, select your desired **Subscription**, **Resource Group**, and **Region**.\n3. Provide the required parameters, including those gathered in the prerequisites step (Open Systems Logs API details, Service Principal credentials, etc.), when prompted by the deployment wizard.\n4. Review the terms and click **Review + create**, then **Create** to start the deployment.""}, {""title"": ""STEP 3: Post-Deployment Verification"", ""description"": ""After successful deployment: \n1. Verify that the Azure Container App running the processor is in a 'Running' state. \n2. Check the `OpenSystemsZtnaLogs_CL`, `OpenSystemsFirewallLogs_CL`, `OpenSystemsAuthenticationLogs_CL`, and `OpenSystemsProxyLogs_CL` tables in your Log Analytics workspace for incoming data. It may take some time for logs to appear after initial setup. \n3. 
Use the sample queries provided in the 'Next Steps' tab of this data connector page to view and analyze your logs.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Container Apps, DCRs, and DCEs"", ""description"": ""Permissions to deploy Azure Container Apps, Managed Environments, Data Collection Rules (DCRs), and Data Collection Endpoints (DCEs) are required. This is typically covered by having the 'Contributor' role on the subscription or resource group.""}, {""name"": ""Role Assignment Permissions"", ""description"": ""Permissions to create role assignments (specifically 'Monitoring Metrics Publisher' on DCRs) are required for the deploying user or service principal.""}, {""name"": ""Required Credentials for ARM Template"", ""description"": ""During deployment, you will need to provide: Open Systems Logs API endpoint and connection string, and Service Principal credentials (Client ID, Client Secret, Object/Principal ID).""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Custom prerequisites if necessary, otherwise delete this customs tag"", ""description"": ""Description for any custom pre-requisites""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Open%20Systems/Data%20Connectors/OpenSystems.json","true" -"OpenSystemsImZTNA","Open Systems","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Open%20Systems","opensystemsag1582030008223","azure-sentinel-solution-osag","2025-05-12","","","Open Systems","Partner","https://www.open-systems.com/support","","domains","OpenSystems","Open Systems","Open Systems Data Connector","The Open Systems Logs API Microsoft Sentinel Connector provides the capability to ingest Open Systems logs into Microsoft Sentinel using Open Systems Logs API.","[{""title"": ""STEP 1: Prerequisites"", ""description"": ""Ensure you have the following information and permissions before proceeding: \n1. Open Systems Logs API endpoint and connection String. \n2. Service Principal credentials (Client ID, Client Secret, Object/Principal ID). \n3. Permissions to deploy Azure Container Apps, Managed Environments, Data Collection Rules (DCRs), Data Collection Endpoints (DCEs), and create Role Assignments (typically 'Contributor' role on the subscription or resource group).""}, {""title"": ""STEP 2: Deploy the Connector"", ""description"": ""Deploy the ARM template to set up the data processing resources, including the data collection rule and associated components.\n\n1. Click the **Deploy to Azure** button below. This will take you to the Azure portal.\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-OpenSystemsLogsAPI-azuredeploy)\n\n2. In the Azure portal, select your desired **Subscription**, **Resource Group**, and **Region**.\n3. 
Provide the required parameters, including those gathered in the prerequisites step (Open Systems Logs API details, Service Principal credentials, etc.), when prompted by the deployment wizard.\n4. Review the terms and click **Review + create**, then **Create** to start the deployment.""}, {""title"": ""STEP 3: Post-Deployment Verification"", ""description"": ""After successful deployment: \n1. Verify that the Azure Container App running the processor is in a 'Running' state. \n2. Check the `OpenSystemsZtnaLogs_CL`, `OpenSystemsFirewallLogs_CL`, `OpenSystemsAuthenticationLogs_CL`, and `OpenSystemsProxyLogs_CL` tables in your Log Analytics workspace for incoming data. It may take some time for logs to appear after initial setup. \n3. Use the sample queries provided in the 'Next Steps' tab of this data connector page to view and analyze your logs.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Container Apps, DCRs, and DCEs"", ""description"": ""Permissions to deploy Azure Container Apps, Managed Environments, Data Collection Rules (DCRs), and Data Collection Endpoints (DCEs) are required. 
This is typically covered by having the 'Contributor' role on the subscription or resource group.""}, {""name"": ""Role Assignment Permissions"", ""description"": ""Permissions to create role assignments (specifically 'Monitoring Metrics Publisher' on DCRs) are required for the deploying user or service principal.""}, {""name"": ""Required Credentials for ARM Template"", ""description"": ""During deployment, you will need to provide: Open Systems Logs API endpoint and connection string, and Service Principal credentials (Client ID, Client Secret, Object/Principal ID).""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Custom prerequisites if necessary, otherwise delete this customs tag"", ""description"": ""Description for any custom pre-requisites""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Open%20Systems/Data%20Connectors/OpenSystems.json","true" -"OpenSystemsProxyLogs_CL","Open Systems","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Open%20Systems","opensystemsag1582030008223","azure-sentinel-solution-osag","2025-05-12","","","Open Systems","Partner","https://www.open-systems.com/support","","domains","OpenSystems","Open Systems","Open Systems Data Connector","The Open Systems Logs API Microsoft Sentinel Connector provides the capability to ingest Open Systems logs into Microsoft Sentinel using Open Systems Logs API.","[{""title"": ""STEP 1: Prerequisites"", ""description"": ""Ensure you have the following information and permissions before proceeding: \n1. Open Systems Logs API endpoint and connection String. \n2. Service Principal credentials (Client ID, Client Secret, Object/Principal ID). \n3. 
Permissions to deploy Azure Container Apps, Managed Environments, Data Collection Rules (DCRs), Data Collection Endpoints (DCEs), and create Role Assignments (typically 'Contributor' role on the subscription or resource group).""}, {""title"": ""STEP 2: Deploy the Connector"", ""description"": ""Deploy the ARM template to set up the data processing resources, including the data collection rule and associated components.\n\n1. Click the **Deploy to Azure** button below. This will take you to the Azure portal.\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-OpenSystemsLogsAPI-azuredeploy)\n\n2. In the Azure portal, select your desired **Subscription**, **Resource Group**, and **Region**.\n3. Provide the required parameters, including those gathered in the prerequisites step (Open Systems Logs API details, Service Principal credentials, etc.), when prompted by the deployment wizard.\n4. Review the terms and click **Review + create**, then **Create** to start the deployment.""}, {""title"": ""STEP 3: Post-Deployment Verification"", ""description"": ""After successful deployment: \n1. Verify that the Azure Container App running the processor is in a 'Running' state. \n2. Check the `OpenSystemsZtnaLogs_CL`, `OpenSystemsFirewallLogs_CL`, `OpenSystemsAuthenticationLogs_CL`, and `OpenSystemsProxyLogs_CL` tables in your Log Analytics workspace for incoming data. It may take some time for logs to appear after initial setup. \n3. 
Use the sample queries provided in the 'Next Steps' tab of this data connector page to view and analyze your logs.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Container Apps, DCRs, and DCEs"", ""description"": ""Permissions to deploy Azure Container Apps, Managed Environments, Data Collection Rules (DCRs), and Data Collection Endpoints (DCEs) are required. This is typically covered by having the 'Contributor' role on the subscription or resource group.""}, {""name"": ""Role Assignment Permissions"", ""description"": ""Permissions to create role assignments (specifically 'Monitoring Metrics Publisher' on DCRs) are required for the deploying user or service principal.""}, {""name"": ""Required Credentials for ARM Template"", ""description"": ""During deployment, you will need to provide: Open Systems Logs API endpoint and connection string, and Service Principal credentials (Client ID, Client Secret, Object/Principal ID).""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Custom prerequisites if necessary, otherwise delete this customs tag"", ""description"": ""Description for any custom pre-requisites""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Open%20Systems/Data%20Connectors/OpenSystems.json","true" -"OpenSystemsZtnaLogs_CL","Open Systems","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Open%20Systems","opensystemsag1582030008223","azure-sentinel-solution-osag","2025-05-12","","","Open Systems","Partner","https://www.open-systems.com/support","","domains","OpenSystems","Open Systems","Open Systems Data Connector","The Open Systems Logs API Microsoft Sentinel Connector provides the capability to ingest Open Systems logs into Microsoft Sentinel using Open Systems Logs API.","[{""title"": ""STEP 1: Prerequisites"", ""description"": ""Ensure you have the following information and permissions before proceeding: \n1. Open Systems Logs API endpoint and connection String. \n2. Service Principal credentials (Client ID, Client Secret, Object/Principal ID). \n3. Permissions to deploy Azure Container Apps, Managed Environments, Data Collection Rules (DCRs), Data Collection Endpoints (DCEs), and create Role Assignments (typically 'Contributor' role on the subscription or resource group).""}, {""title"": ""STEP 2: Deploy the Connector"", ""description"": ""Deploy the ARM template to set up the data processing resources, including the data collection rule and associated components.\n\n1. Click the **Deploy to Azure** button below. This will take you to the Azure portal.\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-OpenSystemsLogsAPI-azuredeploy)\n\n2. In the Azure portal, select your desired **Subscription**, **Resource Group**, and **Region**.\n3. 
Provide the required parameters, including those gathered in the prerequisites step (Open Systems Logs API details, Service Principal credentials, etc.), when prompted by the deployment wizard.\n4. Review the terms and click **Review + create**, then **Create** to start the deployment.""}, {""title"": ""STEP 3: Post-Deployment Verification"", ""description"": ""After successful deployment: \n1. Verify that the Azure Container App running the processor is in a 'Running' state. \n2. Check the `OpenSystemsZtnaLogs_CL`, `OpenSystemsFirewallLogs_CL`, `OpenSystemsAuthenticationLogs_CL`, and `OpenSystemsProxyLogs_CL` tables in your Log Analytics workspace for incoming data. It may take some time for logs to appear after initial setup. \n3. Use the sample queries provided in the 'Next Steps' tab of this data connector page to view and analyze your logs.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Container Apps, DCRs, and DCEs"", ""description"": ""Permissions to deploy Azure Container Apps, Managed Environments, Data Collection Rules (DCRs), and Data Collection Endpoints (DCEs) are required. 
This is typically covered by having the 'Contributor' role on the subscription or resource group.""}, {""name"": ""Role Assignment Permissions"", ""description"": ""Permissions to create role assignments (specifically 'Monitoring Metrics Publisher' on DCRs) are required for the deploying user or service principal.""}, {""name"": ""Required Credentials for ARM Template"", ""description"": ""During deployment, you will need to provide: Open Systems Logs API endpoint and connection string, and Service Principal credentials (Client ID, Client Secret, Object/Principal ID).""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Custom prerequisites if necessary, otherwise delete this customs tag"", ""description"": ""Description for any custom pre-requisites""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Open%20Systems/Data%20Connectors/OpenSystems.json","true" -"","OpenCTI","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OpenCTI","azuresentinel","azure-sentinel-solution-opencti","2022-09-22","2022-09-22","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","OpenVPN","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OpenVPN","azuresentinel","azure-sentinel-solution-openvpn","2022-08-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"Syslog","OpenVPN","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OpenVPN","azuresentinel","azure-sentinel-solution-openvpn","2022-08-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OpenVPN","OpenVPN","[Deprecated] OpenVPN Server","The 
[OpenVPN](https://github.com/OpenVPN) data connector provides the capability to ingest OpenVPN Server logs into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**OpenVpnEvent**](https://aka.ms/sentinel-openvpn-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server where the OpenVPN are forwarded.\n\n> Logs from OpenVPN Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": 
""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n2. Select **Apply below configuration to my machines** and select the facilities and severities.\n3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Check your OpenVPN logs."", ""description"": ""OpenVPN server logs are written into common syslog file (depending on the Linux distribution used: e.g. /var/log/messages)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OpenVPN/Data%20Connectors/OpenVPN_Syslog.json","true" -"","Oracle Cloud Infrastructure","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Oracle%20Cloud%20Infrastructure","azuresentinel","azure-sentinel-solution-ocilogs","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"OCI_LogsV2_CL","Oracle Cloud Infrastructure","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Oracle%20Cloud%20Infrastructure","azuresentinel","azure-sentinel-solution-ocilogs","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OCI-Connector-CCP-Definition","Microsoft","Oracle Cloud Infrastructure (via Codeless Connector Framework)","The Oracle Cloud Infrastructure (OCI) data connector provides the capability to ingest OCI Logs from [OCI 
Stream](https://docs.oracle.com/iaas/Content/Streaming/Concepts/streamingoverview.htm) into Microsoft Sentinel using the [OCI Streaming REST API](https://docs.oracle.com/iaas/api/#/streaming/streaming/20180418).","[{""title"": ""Connect to OCI Streaming API to start collecting Event logs in Microsoft Sentinel"", ""description"": ""1) Log in to the OCI console and access the navigation menu.\n2) In the navigation menu, go to \""Analytics & AI\"" -> \""Streaming\"".\n3) Click \""Create Stream\"".\n4) Select an existing \""Stream Pool\"" or create a new one.\n5) Enter the following details:\n - \""Stream Name\""\n - \""Retention\""\n - \""Number of Partitions\""\n - \""Total Write Rate\""\n - \""Total Read Rate\"" (based on your data volume)\n6) In the navigation menu, go to \""Logging\"" -> \""Service Connectors\"".\n7) Click \""Create Service Connector\"".\n8) Enter the following details:\n - \""Connector Name\""\n - \""Description\""\n - \""Resource Compartment\""\n9) Select the \""Source\"": \""Logging\"".\n10) Select the \""Target\"": \""Streaming\"".\n11) (Optional) Configure \""Log Group\"", \""Filters\"", or use a \""custom search query\"" to stream only the required logs.\n12) Configure the \""Target\"" by selecting the previously created stream.\n13) Click \""Create\"".\n14) Follow the documentation to create a [Private Key and API Key Configuration File](https://docs.oracle.com/en-us/iaas/Content/API/Concepts/apisigningkey.htm).\n\r"", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Tenant Id"", ""columnValue"": ""properties.auth.tenantId""}, {""columnName"": ""Stream"", ""columnValue"": ""properties.request.streamId""}, {""columnName"": ""Partition"", ""columnValue"": ""properties.request.partitions""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""label"": ""Add stream"", ""title"": ""Add Oracle Cloud Infrastructure Data Stream"", ""subtitle"": ""Connect to 
Oracle Cloud Infrastructure Data"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Stream OCID"", ""placeholder"": ""Provide the OCI Stream OCID (E.g. ocid1.stream.oc1..xxxxxxEXAMPLExxxxxx)"", ""type"": ""text"", ""name"": ""streamId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Public Message Endpoint of the stream (Service Endpoint Base URL)"", ""placeholder"": ""Provide the Service Endpoint Base URL: (https://cell-1.streaming.ap-hyderabad-1.oci.oraclecloud.com)"", ""type"": ""text"", ""name"": ""serviceEndpointBaseUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Cursor Type"", ""name"": ""cursorType"", ""options"": [{""key"": ""IndividualCursor"", ""text"": ""Individual Cursor""}], ""required"": true}}, {""type"": ""InfoMessage"", ""parameters"": {""text"": ""The partition ID uses zero-based indexing. For example, if a stream has 3 partitions, the valid partition IDs are 0, 1, or 2.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Partition Id"", ""placeholder"": ""Provide the Partition Id. (E.g. 0 or 1 or 2)"", ""type"": ""text"", ""name"": ""partitions"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Tenant ID"", ""placeholder"": ""OCI Tenant ID (E.g. ocid1.tenancy.oc1..xxxxxxEXAMPLExxxxxx)"", ""type"": ""text"", ""name"": ""tenantId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""User ID"", ""placeholder"": ""Provide the User Id. (E.g. 
ocid1.user.oc1..xxxxxxEXAMPLExxxxxx)"", ""type"": ""text"", ""name"": ""userId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Pem File Content"", ""placeholder"": ""Provide the Pem File content."", ""type"": ""password"", ""name"": ""pemFile"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Fingerprint"", ""placeholder"": ""Provide the fingerprint for the Pem File Content. (E.g. 12:34:56:78:90:AB:CD:EF:GH:IJ:KL:MN:OP)"", ""type"": ""password"", ""name"": ""publicFingerprint"", ""validations"": {""required"": true}}}, {""type"": ""InfoMessage"", ""parameters"": {""text"": ""If your PEM file is not encrypted, leave Pass Phrase as blank.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Pem File Pass Phrase"", ""placeholder"": ""Just Leave blank If not encrypted)"", ""type"": ""password"", ""name"": ""passPhrase""}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}], ""customs"": [{""name"": ""OCI Streaming API access"", ""description"": ""Access to the OCI Streaming API through a API Signing Keys is required.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Oracle%20Cloud%20Infrastructure/Data%20Connectors/Oracle_Cloud_Infrastructure_CCP/OCI_DataConnector_DataConnectorDefinition.json","true" -"OCI_Logs_CL","Oracle Cloud Infrastructure","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Oracle%20Cloud%20Infrastructure","azuresentinel","azure-sentinel-solution-ocilogs","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OracleCloudInfrastructureLogsConnector","Oracle","[DEPRECATED] Oracle Cloud 
Infrastructure","The Oracle Cloud Infrastructure (OCI) data connector provides the capability to ingest OCI Logs from [OCI Stream](https://docs.oracle.com/iaas/Content/Streaming/Concepts/streamingoverview.htm) into Microsoft Sentinel using the [OCI Streaming REST API](https://docs.oracle.com/iaas/api/#/streaming/streaming/20180418).

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector can go over the 500 column limit of log Analytics. When this happens some logs will be dropped. For this reason the connector can be unreliable depending on the logs that are being generated and collected.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Azure Blob Storage API to pull logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**OCILogs**](https://aka.ms/sentinel-OracleCloudInfrastructureLogsConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Creating Stream**\n\n1. Log in to OCI console and go to *navigation menu* -> *Analytics & AI* -> *Streaming*\n2. Click *Create Stream*\n3. Select Stream Pool or create a new one\n4. Provide the *Stream Name*, *Retention*, *Number of Partitions*, *Total Write Rate*, *Total Read Rate* based on your data amount.\n5. Go to *navigation menu* -> *Logging* -> *Service Connectors*\n6. Click *Create Service Connector*\n6. Provide *Connector Name*, *Description*, *Resource Compartment*\n7. Select Source: Logging\n8. 
Select Target: Streaming\n9. (Optional) Configure *Log Group*, *Filters* or use custom search query to stream only logs that you need.\n10. Configure Target - select the stream created before.\n11. Click *Create*\n\nCheck the documentation to get more information about [Streaming](https://docs.oracle.com/en-us/iaas/Content/Streaming/home.htm) and [Service Connectors](https://docs.oracle.com/en-us/iaas/Content/service-connector-hub/home.htm).""}, {""title"": """", ""description"": ""**STEP 2 - Creating credentials for OCI REST API**\n\nFollow the documentation to [create Private Key and API Key Configuration File.](https://docs.oracle.com/en-us/iaas/Content/API/Concepts/apisigningkey.htm)\n\n>**IMPORTANT:** Save Private Key and API Key Configuration File created during this step as they will be used during deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the OCI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as OCI API credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the OCI data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-OracleCloudInfrastructureLogsConnector-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key**, **User**, **Key_content**, **Pass_phrase**, **Fingerprint**, **Tenancy**, **Region**, **Message Endpoint**, **Stream Ocid**\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the OCI data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentinel-OracleCloudInfrastructureLogsConnector-functionapp) file. Extract archive to your local development computer..\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAzureSentinelWorkspaceId\n\t\tAzureSentinelSharedKey\n\t\tuser\n\t\tkey_content\n\t\tpass_phrase (Optional)\n\t\tfingerprint\n\t\ttenancy\n\t\tregion\n\t\tMessage Endpoint\n\t\tStreamOcid\n\t\tlogAnalyticsUri (Optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. 
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`.\n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""OCI API Credentials"", ""description"": "" **API Key Configuration File** and **Private Key** are required for OCI API connection. 
See the documentation to learn more about [creating keys for API access](https://docs.oracle.com/en-us/iaas/Content/API/Concepts/apisigningkey.htm)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Oracle%20Cloud%20Infrastructure/Data%20Connectors/OCI_logs_API_FunctionApp.json","true" -"","OracleDatabaseAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OracleDatabaseAudit","azuresentinel","azure-sentinel-solution-oracledbaudit","2021-11-05","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"Syslog","OracleDatabaseAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OracleDatabaseAudit","azuresentinel","azure-sentinel-solution-oracledbaudit","2021-11-05","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OracleDatabaseAudit","Oracle","[Deprecated] Oracle Database Audit","The Oracle DB Audit data connector provides the capability to ingest [Oracle Database](https://www.oracle.com/database/technologies/) audit events into Microsoft Sentinel through the syslog. Refer to [documentation](https://docs.oracle.com/en/database/oracle/oracle-database/21/dbseg/introduction-to-auditing.html#GUID-94381464-53A3-421B-8F13-BD171C867405) for more information.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Oracle Database Audit and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OracleDatabaseAudit/Parsers/OracleDatabaseAuditEvent.txt). The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n2. Select **Apply below configuration to my machines** and select the facilities and severities.\n3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Configure Oracle Database Audit events to be sent to Syslog"", ""description"": ""Follow the below instructions \n\n 1. Create the Oracle database [Follow these steps.](https://learn.microsoft.com/en-us/azure/virtual-machines/workloads/oracle/oracle-database-quick-create) \n\n 2. Login to Oracle database created from the above step [Follow these steps.](https://docs.oracle.com/cd/F49540_01/DOC/server.815/a67772/create.htm) \n\n 3. 
Enable unified logging over syslog by **Alter the system to enable unified logging** [Following these steps.](https://docs.oracle.com/en/database/oracle/oracle-database/21/refrn/UNIFIED_AUDIT_COMMON_SYSTEMLOG.html#GUID-9F26BC8E-1397-4B0E-8A08-3B12E4F9ED3A) \n\n 4. Create and **enable an Audit policy for unified auditing** [Follow these steps.](https://docs.oracle.com/en/database/oracle/oracle-database/19/sqlrf/CREATE-AUDIT-POLICY-Unified-Auditing.html#GUID-8D6961FB-2E50-46F5-81F7-9AEA314FC693) \n\n 5. **Enabling syslog and Event Viewer** Captures for the Unified Audit Trail [Follow these steps.](https://docs.oracle.com/en/database/oracle/oracle-database/18/dbseg/administering-the-audit-trail.html#GUID-3EFB75DB-AE1C-44E6-B46E-30E5702B0FC4)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OracleDatabaseAudit/Data%20Connectors/Connector_OracleDatabaseAudit.json","true" -"","OracleWebLogicServer","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OracleWebLogicServer","azuresentinel","azure-sentinel-solution-oracleweblogicserver","2022-01-06","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"OracleWebLogicServer_CL","OracleWebLogicServer","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OracleWebLogicServer","azuresentinel","azure-sentinel-solution-oracleweblogicserver","2022-01-06","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OracleWebLogicServer","Oracle","[Deprecated] Oracle WebLogic Server","OracleWebLogicServer data connector provides the capability to ingest 
[OracleWebLogicServer](https://docs.oracle.com/en/middleware/standalone/weblogic-server/index.html) events into Microsoft Sentinel. Refer to [OracleWebLogicServer documentation](https://docs.oracle.com/en/middleware/standalone/weblogic-server/14.1.1.0/index.html) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias OracleWebLogicServerEvent and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OracleWebLogicServer/Parsers/OracleWebLogicServerEvent.yaml). The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Oracle WebLogic Server where the logs are generated.\n\n> Logs from Oracle WebLogic Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows 
agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the custom log directory to be collected"", ""instructions"": [{""parameters"": {""linkType"": ""OpenCustomLogsSettings""}, ""type"": ""InstallAgent""}]}, {""title"": """", ""description"": ""1. Select the link above to open your workspace advanced settings \n2. From the left pane, select **Data**, select **Custom Logs** and click **Add+**\n3. Click **Browse** to upload a sample of a OracleWebLogicServer log file (e.g. server.log). Then, click **Next >**\n4. Select **New line** as the record delimiter and click **Next >**\n5. Select **Windows** or **Linux** and enter the path to OracleWebLogicServer logs based on your configuration. Example: \n - **Linux** Directory: 'DOMAIN_HOME/servers/server_name/logs/*.log'\n - **Windows** Directory: 'DOMAIN_NAME\\servers\\SERVER_NAME\\logs\\*.log'\n6. After entering the path, click the '+' symbol to apply, then click **Next >** \n7. 
Add **OracleWebLogicServer_CL** as the custom log Name and click **Done**""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OracleWebLogicServer/Data%20Connectors/Connector_OracleWebLogicServer_agent.json","true" -"","Orca Security Alerts","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Orca%20Security%20Alerts","orcasecurityinc1621870991703","orca_security_alerts_mss","2022-05-10","","","Orca Security","Partner","https://orca.security/about/contact/","","domains","","","","","","","false","","false" -"OrcaAlerts_CL","Orca Security Alerts","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Orca%20Security%20Alerts","orcasecurityinc1621870991703","orca_security_alerts_mss","2022-05-10","","","Orca Security","Partner","https://orca.security/about/contact/","","domains","OrcaSecurityAlerts","Orca Security","Orca Security Alerts","The Orca Security Alerts connector allows you to easily export Alerts logs to Microsoft Sentinel.","[{""title"": """", ""description"": ""Follow [guidance](https://orcasecurity.zendesk.com/hc/en-us/articles/360043941992-Azure-Sentinel-configuration) for integrating Orca Security Alerts logs with Microsoft Sentinel."", ""instructions"": [{""parameters"": 
{""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Orca%20Security%20Alerts/Data%20Connectors/OrcaSecurityAlerts.json","true" -"","PCI DSS Compliance","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PCI%20DSS%20Compliance","azuresentinel","azure-sentinel-solution-pcidsscompliance","2022-06-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"","PDNS Block Data Connector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PDNS%20Block%20Data%20Connector","azuresentinel","azure-sentinel-solution-pdnsblockdataconnector","2023-03-31","","","Nominet PDNS Support","Partner","https://www.protectivedns.service.ncsc.gov.uk/pdns","","domains","","","","","","","false","","false" -"PDNSBlockData_CL","PDNS Block Data Connector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PDNS%20Block%20Data%20Connector","azuresentinel","azure-sentinel-solution-pdnsblockdataconnector","2023-03-31","","","Nominet PDNS 
Support","Partner","https://www.protectivedns.service.ncsc.gov.uk/pdns","","domains","PDNSBlockDataConnector","Nominet","PDNS Block Data Connector","This application enables you to ingest your PDNS block data into your SIEM tool","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to pull logs into Microsoft Sentinel. This might result in additional costs for data ingestion. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - To configure access to the S3 Bucket containing your PDNS Data Blocks, use the Access Key ID, Secret Access Key, and Role ARN that were provided to you.**""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**AWSAccessKeyId** and **AWSSecretAccessKey** are required for making AWS API calls.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PDNS%20Block%20Data%20Connector/Data%20Connectors/PDNSBlockDataConnector_API_FunctionApp.json","true" -"","Palo Alto - XDR (Cortex)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20-%20XDR%20%28Cortex%29","","","","","","","","","","","","","","","","","false","","false" -"CommonSecurityLog","Palo Alto - XDR (Cortex)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20-%20XDR%20%28Cortex%29","","","","","","","","","","","PaloAltoNetworksCortex","Palo Alto Networks","Palo Alto Networks Cortex XDR","The Palo Alto Networks Cortex XDR connector gives you an easy way to connect to your Cortex XDR logs with Microsoft Sentinel. This increases the visibility of your endpoint security. It will give you better ability to monitor your resources by creating custom Workbooks, analytics rules, Incident investigation, and evidence gathering.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Azure Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Azure Sentinel will use as the proxy between your security solution and Azure Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Azure Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Palo Alto Networks (Cortex) logs to Syslog agent"", ""description"": ""\n\n> 1. Go to [Cortex Settings and Configurations](https://inspira.xdr.in.paloaltonetworks.com/configuration/external-alerting) and Click to add New Server under External Applications.\n\n> 2. Then specify the name and Give public IP of your syslog server in Destination. \n\n> 3. Give Port number as 514 and from Facility field select FAC_SYSLOG from dropdown. \n\n> 4. Select Protocol as UDP and hit Create.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20-%20XDR%20%28Cortex%29/Data%20Connectors/Connector_PaloAlto_XDR_CEF.json","true" -"","Palo Alto Cortex XDR CCP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP","azuresentinel","azure-sentinel-solution-cortexccp","2024-12-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"PaloAltoCortexXDR_Alerts_CL","Palo Alto Cortex XDR CCP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP","azuresentinel","azure-sentinel-solution-cortexccp","2024-12-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CortexXDRDataConnector","Microsoft","Palo Alto Cortex XDR","The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows ingesting logs from the Palo Alto Cortex XDR API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Palo Alto Cortex XDR API \n Follow the instructions to obtain the credentials. 
you can also follow this [guide](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/3u3j0e7hcx8t1-get-started-with-cortex-xdr-ap-is) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\n 1.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 1.3. Under [**Integrations**] click on [**API Keys**].\n 1.4. In the [**Settings**] Page click on [**Copy API URL**] in the top right corner.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 2.3. Under [**Integrations**] click on [**API Keys**].\n 2.4. In the [**Settings**] Page click on [**New Key**] in the top right corner.\n 2.5. Choose security level, role, choose Standard and click on [**Generate**]\n 2.6. 
Copy the API Token, once it generated the [**API Token ID**] can be found under the ID column""}}, {""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api-example.xdr.au.paloaltonetworks.com"", ""type"": ""text"", ""name"": ""apiUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Key ID"", ""placeholder"": ""API ID"", ""type"": ""text"", ""name"": ""apiId""}, ""type"": ""Textbox""}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""apiToken""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true" -"PaloAltoCortexXDR_Audit_Agent_CL","Palo Alto Cortex XDR CCP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP","azuresentinel","azure-sentinel-solution-cortexccp","2024-12-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CortexXDRDataConnector","Microsoft","Palo Alto Cortex XDR","The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows ingesting logs from the Palo Alto Cortex XDR API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Palo Alto Cortex XDR API \n Follow the instructions to obtain the credentials. you can also follow this [guide](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/3u3j0e7hcx8t1-get-started-with-cortex-xdr-ap-is) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\n 1.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 1.3. Under [**Integrations**] click on [**API Keys**].\n 1.4. In the [**Settings**] Page click on [**Copy API URL**] in the top right corner.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 2.3. Under [**Integrations**] click on [**API Keys**].\n 2.4. In the [**Settings**] Page click on [**New Key**] in the top right corner.\n 2.5. Choose security level, role, choose Standard and click on [**Generate**]\n 2.6. 
Copy the API Token, once it generated the [**API Token ID**] can be found under the ID column""}}, {""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api-example.xdr.au.paloaltonetworks.com"", ""type"": ""text"", ""name"": ""apiUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Key ID"", ""placeholder"": ""API ID"", ""type"": ""text"", ""name"": ""apiId""}, ""type"": ""Textbox""}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""apiToken""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true" -"PaloAltoCortexXDR_Audit_Management_CL","Palo Alto Cortex XDR CCP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP","azuresentinel","azure-sentinel-solution-cortexccp","2024-12-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CortexXDRDataConnector","Microsoft","Palo Alto Cortex XDR","The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows ingesting logs from the Palo Alto Cortex XDR API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Palo Alto Cortex XDR API \n Follow the instructions to obtain the credentials. you can also follow this [guide](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/3u3j0e7hcx8t1-get-started-with-cortex-xdr-ap-is) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\n 1.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 1.3. Under [**Integrations**] click on [**API Keys**].\n 1.4. In the [**Settings**] Page click on [**Copy API URL**] in the top right corner.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 2.3. Under [**Integrations**] click on [**API Keys**].\n 2.4. In the [**Settings**] Page click on [**New Key**] in the top right corner.\n 2.5. Choose security level, role, choose Standard and click on [**Generate**]\n 2.6. 
Copy the API Token, once it generated the [**API Token ID**] can be found under the ID column""}}, {""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api-example.xdr.au.paloaltonetworks.com"", ""type"": ""text"", ""name"": ""apiUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Key ID"", ""placeholder"": ""API ID"", ""type"": ""text"", ""name"": ""apiId""}, ""type"": ""Textbox""}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""apiToken""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true" -"PaloAltoCortexXDR_Endpoints_CL","Palo Alto Cortex XDR CCP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP","azuresentinel","azure-sentinel-solution-cortexccp","2024-12-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CortexXDRDataConnector","Microsoft","Palo Alto Cortex XDR","The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows ingesting logs from the Palo Alto Cortex XDR API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Palo Alto Cortex XDR API \n Follow the instructions to obtain the credentials. you can also follow this [guide](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/3u3j0e7hcx8t1-get-started-with-cortex-xdr-ap-is) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\n 1.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 1.3. Under [**Integrations**] click on [**API Keys**].\n 1.4. In the [**Settings**] Page click on [**Copy API URL**] in the top right corner.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 2.3. Under [**Integrations**] click on [**API Keys**].\n 2.4. In the [**Settings**] Page click on [**New Key**] in the top right corner.\n 2.5. Choose security level, role, choose Standard and click on [**Generate**]\n 2.6. 
Copy the API Token, once it generated the [**API Token ID**] can be found under the ID column""}}, {""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api-example.xdr.au.paloaltonetworks.com"", ""type"": ""text"", ""name"": ""apiUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Key ID"", ""placeholder"": ""API ID"", ""type"": ""text"", ""name"": ""apiId""}, ""type"": ""Textbox""}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""apiToken""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true" -"PaloAltoCortexXDR_Incidents_CL","Palo Alto Cortex XDR CCP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP","azuresentinel","azure-sentinel-solution-cortexccp","2024-12-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CortexXDRDataConnector","Microsoft","Palo Alto Cortex XDR","The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows ingesting logs from the Palo Alto Cortex XDR API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Palo Alto Cortex XDR API \n Follow the instructions to obtain the credentials. you can also follow this [guide](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/3u3j0e7hcx8t1-get-started-with-cortex-xdr-ap-is) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\n 1.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 1.3. Under [**Integrations**] click on [**API Keys**].\n 1.4. In the [**Settings**] Page click on [**Copy API URL**] in the top right corner.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 2.3. Under [**Integrations**] click on [**API Keys**].\n 2.4. In the [**Settings**] Page click on [**New Key**] in the top right corner.\n 2.5. Choose security level, role, choose Standard and click on [**Generate**]\n 2.6. 
Copy the API Token, once it generated the [**API Token ID**] can be found under the ID column""}}, {""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api-example.xdr.au.paloaltonetworks.com"", ""type"": ""text"", ""name"": ""apiUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Key ID"", ""placeholder"": ""API ID"", ""type"": ""text"", ""name"": ""apiId""}, ""type"": ""Textbox""}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""apiToken""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true" -"","Palo Alto Cortex Xpanse CCF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20Xpanse%20CCF","azuresentinel","azure-sentinel-solution-cortexxpanse","2024-12-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"CortexXpanseAlerts_CL","Palo Alto Cortex Xpanse CCF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20Xpanse%20CCF","azuresentinel","azure-sentinel-solution-cortexxpanse","2024-12-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PaloAltoExpanseCCPDefinition","Microsoft","Palo Alto Cortex Xpanse (via Codeless Connector Framework)","The Palo Alto Cortex 
Xpanse data connector ingests alerts data into Microsoft Sentinel.","[{""description"": ""To ingest data from Palo Alto Cortex Xpanse to Microsoft Sentinel, click on **Add Domain**. Fill in the required details in the pop-up and click Connect. You will see connected domain endpoints in the grid below. To get the Auth ID and API Key, go to **Settings \u2192 Configuration \u2192 Integrations \u2192 API Keys** in the Cortex Xpanse portal and generate new credentials."", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Endpoint"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add domain"", ""title"": ""Add domain"", ""subtitle"": ""Add domain"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Domain Name"", ""placeholder"": ""e.g., example.crtx.us.paloaltonetworks.com"", ""type"": ""text"", ""name"": ""domainName"", ""required"": true, ""description"": ""Enter the domain suffix to be used in the API endpoint, e.g., `example.crtx.us.paloaltonetworks.com`""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""Enter your Palo Alto Xpanse API Key"", ""type"": ""password"", ""name"": ""apiKey"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Xpanse Auth ID"", ""placeholder"": ""Enter your Xpanse Auth ID"", ""type"": ""text"", ""name"": ""xpanseAuthId"", ""required"": true}}]}]}}], ""title"": ""Connect Palo Alto Xpanse to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, 
""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20Xpanse%20CCF/Data%20Connectors/CortexXpanse_ccp/CortexXpanse_ConnectorDefinition.json","true" -"","Palo Alto Prisma Cloud CWPP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Prisma%20Cloud%20CWPP","azuresentinel","azure-sentinel-solution-prismacloudcompute","2022-06-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"PrismaCloudCompute_CL","Palo Alto Prisma Cloud CWPP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Prisma%20Cloud%20CWPP","azuresentinel","azure-sentinel-solution-prismacloudcompute","2022-06-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","PaloAltoPrismaCloudCWPP","Microsoft","Palo Alto Prisma Cloud CWPP (using REST API)","The [Palo Alto Prisma Cloud CWPP](https://prisma.pan.dev/api/cloud/cwpp/audits/#operation/get-audits-incidents) data connector allows you to connect to your Palo Alto Prisma Cloud CWPP instance and ingesting alerts into Microsoft Sentinel. 
The data connector is built on Microsoft Sentinel's Codeless Connector Platform and uses the Prisma Cloud API to fetch security events and supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security event data into a custom columns so that queries don't need to parse it again, thus resulting in better performance.","[{""description"": ""To enable the Palo Alto Prisma Cloud CWPP Security Events for Microsoft Sentinel, provide the required information below and click on Connect.\n>"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Path to console"", ""placeholder"": ""europe-west3.cloud.twistlock.com/{sasid}"", ""type"": ""text"", ""name"": ""domainname""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Prisma Access Key (API)"", ""placeholder"": ""Prisma Access Key (API)"", ""type"": ""text"", ""name"": ""username""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Secret"", ""placeholder"": ""Secret"", ""type"": ""password"", ""name"": ""password""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Connect Palo Alto Prisma Cloud CWPP Security Events to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""PrismaCloudCompute API Key"", ""description"": ""A Palo Alto Prisma Cloud CWPP Monitor API username and password is required. [See the documentation to learn more about PrismaCloudCompute SIEM API](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Prisma%20Cloud%20CWPP/Data%20Connectors/readme.md).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Prisma%20Cloud%20CWPP/Data%20Connectors/PaloAltoPrismaCloudCWPP_ccp/connectorDefinition.json","true" -"PrismaCloudCompute_CL","Palo Alto Prisma Cloud CWPP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Prisma%20Cloud%20CWPP","azuresentinel","azure-sentinel-solution-prismacloudcompute","2022-06-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","PrismaCloudComputeNativePoller","Microsoft","Palo Alto Prisma Cloud CWPP (using REST API)","The [Palo Alto Prisma Cloud CWPP](https://prisma.pan.dev/api/cloud/cwpp/audits/#operation/get-audits-incidents) data connector allows you to connect to your Prisma Cloud CWPP instance and ingesting alerts into Microsoft Sentinel. 
The data connector is built on Microsoft Sentinel’s Codeless Connector Platform and uses the Prisma Cloud API to fetch security events and supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security event data into a custom columns so that queries don't need to parse it again, thus resulting in better performance.","[{""description"": ""To enable the Palo Alto Prisma Cloud CWPP Security Events for Microsoft Sentinel, provide the required information below and click on Connect.\n>"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Path to console"", ""placeholder"": ""https://europe-west3.cloud.twistlock.com/{sasid}"", ""type"": ""text"", ""name"": ""domainname""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Prisma Access Key (API)"", ""placeholder"": ""Prisma Access Key (API)"", ""type"": ""text"", ""name"": ""username""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Secret"", ""placeholder"": ""Secret"", ""type"": ""password"", ""name"": ""password""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Connect Palo Alto Prisma Cloud CWPP Security Events to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""PrismaCloudCompute API Key"", ""description"": ""A Palo Alto Prisma Cloud CWPP Monitor API username and password is required. [See the documentation to learn more about PrismaCloudCompute SIEM API](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Prisma%20Cloud%20CWPP/Data%20Connectors/readme.md).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Prisma%20Cloud%20CWPP/Data%20Connectors/PrismaCloudCompute_CLV2.json","true" -"","PaloAlto-PAN-OS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAlto-PAN-OS","azuresentinel","azure-sentinel-solution-paloaltopanos","2021-08-09","2021-09-20","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"CommonSecurityLog","PaloAlto-PAN-OS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAlto-PAN-OS","azuresentinel","azure-sentinel-solution-paloaltopanos","2021-08-09","2021-09-20","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PaloAltoNetworks","Palo Alto Networks","[Deprecated] Palo Alto Networks (Firewall) via Legacy Agent","The Palo Alto Networks firewall connector allows you to easily connect your Palo Alto Networks logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. 
Forward Palo Alto Networks logs to Syslog agent"", ""description"": ""Configure Palo Alto Networks to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent.\n\nGo to [configure Palo Alto Networks NGFW for sending CEF events.](https://aka.ms/sentinel-paloaltonetworks-readme)\n\nGo to [Palo Alto CEF Configuration](https://aka.ms/asi-syslog-paloalto-forwarding) and Palo Alto [Configure Syslog Monitoring](https://aka.ms/asi-syslog-paloalto-configure) steps 2, 3, choose your version, and follow the instructions using the following guidelines:\n\n1. Set the Syslog server format to **BSD**.\n\n2. The copy/paste operations from the PDF might change the text and insert random characters. To avoid this, copy the text to an editor and remove any characters that might break the log format before pasting it.\n\n[Learn more >](https://aka.ms/CEFPaloAlto)""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAlto-PAN-OS/Data%20Connectors/PaloAltoNetworks.json","true" -"CommonSecurityLog","PaloAlto-PAN-OS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAlto-PAN-OS","azuresentinel","azure-sentinel-solution-paloaltopanos","2021-08-09","2021-09-20","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PaloAltoNetworksAma","Palo Alto Networks","[Deprecated] Palo Alto Networks (Firewall) via AMA","The Palo Alto Networks firewall connector allows you to easily connect your Palo Alto Networks logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": """", ""description"": """", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. 
Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward Palo Alto Networks logs to Syslog agent"", ""description"": ""Configure Palo Alto Networks to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent.\n\nGo to [configure Palo Alto Networks NGFW for sending CEF events.](https://aka.ms/sentinel-paloaltonetworks-readme)\n\nGo to [Palo Alto CEF Configuration](https://aka.ms/asi-syslog-paloalto-forwarding) and Palo Alto [Configure Syslog Monitoring](https://aka.ms/asi-syslog-paloalto-configure) steps 2, 3, choose your version, and follow the instructions using the following guidelines:\n\n1. Set the Syslog server format to **BSD**.\n\n2. The copy/paste operations from the PDF might change the text and insert random characters. To avoid this, copy the text to an editor and remove any characters that might break the log format before pasting it.\n\n[Learn more >](https://aka.ms/CEFPaloAlto)"", ""instructions"": []}, {""title"": ""Step C. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAlto-PAN-OS/Data%20Connectors/template_PaloAltoNetworksAMA.json","true" -"","PaloAltoCDL","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoCDL","azuresentinel","azure-sentinel-solution-paloaltocdl","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"CommonSecurityLog","PaloAltoCDL","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoCDL","azuresentinel","azure-sentinel-solution-paloaltocdl","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PaloAltoCDL","Palo Alto Networks","[Deprecated] Palo Alto Networks Cortex Data Lake (CDL) via Legacy Agent","The [Palo Alto Networks CDL](https://www.paloaltonetworks.com/cortex/cortex-data-lake) data connector provides the capability to ingest [CDL logs](https://docs.paloaltonetworks.com/strata-logging-service/log-reference/log-forwarding-schema-overview) into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**PaloAltoCDLEvent**](https://aka.ms/sentinel-paloaltocdl-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Configure Cortex Data Lake to forward logs to a Syslog Server using CEF"", ""description"": ""[Follow the instructions](https://docs.paloaltonetworks.com/cortex/cortex-data-lake/cortex-data-lake-getting-started/get-started-with-log-forwarding-app/forward-logs-from-logging-service-to-syslog-server.html) to configure logs forwarding from Cortex Data Lake to a Syslog Server.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoCDL/Data%20Connectors/Connector_PaloAlto_CDL_CEF.json","true" -"CommonSecurityLog","PaloAltoCDL","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoCDL","azuresentinel","azure-sentinel-solution-paloaltocdl","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PaloAltoCDLAma","Palo Alto Networks","[Deprecated] Palo Alto Networks Cortex Data Lake (CDL) via AMA","The [Palo Alto Networks CDL](https://www.paloaltonetworks.com/cortex/cortex-data-lake) data connector provides the capability to ingest [CDL logs](https://docs.paloaltonetworks.com/strata-logging-service/log-reference/log-forwarding-schema-overview) into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**PaloAltoCDLEvent**](https://aka.ms/sentinel-paloaltocdl-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Configure Cortex Data Lake to forward logs to a Syslog Server using CEF"", ""description"": ""[Follow the instructions](https://docs.paloaltonetworks.com/cortex/cortex-data-lake/cortex-data-lake-getting-started/get-started-with-log-forwarding-app/forward-logs-from-logging-service-to-syslog-server.html) to configure logs forwarding from Cortex Data Lake to a Syslog Server."", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoCDL/Data%20Connectors/template_PaloAlto_CDLAMA.json","true" -"","PaloAltoPrismaCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud","azuresentinel","azure-sentinel-solution-paloaltoprisma","2021-04-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"PaloAltoPrismaCloudAlert_CL","PaloAltoPrismaCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud","azuresentinel","azure-sentinel-solution-paloaltoprisma","2021-04-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PaloAltoPrismaCloud","Palo Alto","[DEPRECATED] Palo Alto Prisma Cloud CSPM","The Palo Alto Prisma Cloud CSPM data connector provides the capability to ingest [Prisma Cloud CSPM alerts](https://prisma.pan.dev/api/cloud/cspm/alerts#operation/get-alerts) and [audit logs](https://prisma.pan.dev/api/cloud/cspm/audit-logs#operation/rl-audit-logs) into Microsoft sentinel using the Prisma Cloud CSPM API. Refer to [Prisma Cloud CSPM API documentation](https://prisma.pan.dev/api/cloud/cspm) for more information.

NOTE: This data connector has been deprecated; consider moving to the CCF data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Palo Alto Prisma Cloud REST API to pull logs into Microsoft sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**PaloAltoPrismaCloud**](https://aka.ms/sentinel-PaloAltoPrismaCloud-parser) which is deployed with the Microsoft sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Prisma Cloud**\n\nFollow the documentation to [create Prisma Cloud Access Key](https://docs.paloaltonetworks.com/prisma/prisma-cloud/prisma-cloud-admin/manage-prisma-cloud-administrators/create-access-keys.html) and [obtain Prisma Cloud API Url](https://api.docs.prismacloud.io/reference)\n\n NOTE: Please use SYSTEM ADMIN role for giving access to Prisma Cloud API because only SYSTEM ADMIN role is allowed to View Prisma Cloud Audit Logs. 
Refer to [Prisma Cloud Administrator Permissions (paloaltonetworks.com)](https://docs.paloaltonetworks.com/prisma/prisma-cloud/prisma-cloud-admin/manage-prisma-cloud-administrators/prisma-cloud-admin-permissions) for more details of administrator permissions.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Prisma Cloud data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Prisma Cloud API credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Prisma Cloud data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-PaloAltoPrismaCloud-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Prisma Cloud API Url**, **Prisma Cloud Access Key ID**, **Prisma Cloud Secret Key**, **Microsoft sentinel Workspace Id**, **Microsoft sentinel Shared Key**\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Prisma Cloud data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-PaloAltoPrismaCloud-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tPrismaCloudAPIUrl\n\t\tPrismaCloudAccessKeyID\n\t\tPrismaCloudSecretKey\n\t\tAzureSentinelWorkspaceId\n\t\tAzureSentinelSharedKey\n\t\tlogAnalyticsUri (Optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. \n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Palo Alto Prisma Cloud API Credentials"", ""description"": ""**Prisma Cloud API Url**, **Prisma Cloud Access Key ID**, **Prisma Cloud Secret Key** are required for Prisma Cloud API connection. 
See the documentation to learn more about [creating Prisma Cloud Access Key](https://docs.paloaltonetworks.com/prisma/prisma-cloud/prisma-cloud-admin/manage-prisma-cloud-administrators/create-access-keys.html) and about [obtaining Prisma Cloud API Url](https://prisma.pan.dev/api/cloud/api-urls)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud/Data%20Connectors/PrismaCloud_API_FunctionApp.json","true" -"PaloAltoPrismaCloudAudit_CL","PaloAltoPrismaCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud","azuresentinel","azure-sentinel-solution-paloaltoprisma","2021-04-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PaloAltoPrismaCloud","Palo Alto","[DEPRECATED] Palo Alto Prisma Cloud CSPM","The Palo Alto Prisma Cloud CSPM data connector provides the capability to ingest [Prisma Cloud CSPM alerts](https://prisma.pan.dev/api/cloud/cspm/alerts#operation/get-alerts) and [audit logs](https://prisma.pan.dev/api/cloud/cspm/audit-logs#operation/rl-audit-logs) into Microsoft sentinel using the Prisma Cloud CSPM API. Refer to [Prisma Cloud CSPM API documentation](https://prisma.pan.dev/api/cloud/cspm) for more information.

NOTE: This data connector has been deprecated; consider moving to the CCF data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Palo Alto Prisma Cloud REST API to pull logs into Microsoft sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**PaloAltoPrismaCloud**](https://aka.ms/sentinel-PaloAltoPrismaCloud-parser) which is deployed with the Microsoft sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Prisma Cloud**\n\nFollow the documentation to [create Prisma Cloud Access Key](https://docs.paloaltonetworks.com/prisma/prisma-cloud/prisma-cloud-admin/manage-prisma-cloud-administrators/create-access-keys.html) and [obtain Prisma Cloud API Url](https://api.docs.prismacloud.io/reference)\n\n NOTE: Please use SYSTEM ADMIN role for giving access to Prisma Cloud API because only SYSTEM ADMIN role is allowed to View Prisma Cloud Audit Logs. 
Refer to [Prisma Cloud Administrator Permissions (paloaltonetworks.com)](https://docs.paloaltonetworks.com/prisma/prisma-cloud/prisma-cloud-admin/manage-prisma-cloud-administrators/prisma-cloud-admin-permissions) for more details of administrator permissions.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Prisma Cloud data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Prisma Cloud API credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Prisma Cloud data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-PaloAltoPrismaCloud-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Prisma Cloud API Url**, **Prisma Cloud Access Key ID**, **Prisma Cloud Secret Key**, **Microsoft sentinel Workspace Id**, **Microsoft sentinel Shared Key**\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Prisma Cloud data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-PaloAltoPrismaCloud-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tPrismaCloudAPIUrl\n\t\tPrismaCloudAccessKeyID\n\t\tPrismaCloudSecretKey\n\t\tAzureSentinelWorkspaceId\n\t\tAzureSentinelSharedKey\n\t\tlogAnalyticsUri (Optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. \n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Palo Alto Prisma Cloud API Credentials"", ""description"": ""**Prisma Cloud API Url**, **Prisma Cloud Access Key ID**, **Prisma Cloud Secret Key** are required for Prisma Cloud API connection. 
See the documentation to learn more about [creating Prisma Cloud Access Key](https://docs.paloaltonetworks.com/prisma/prisma-cloud/prisma-cloud-admin/manage-prisma-cloud-administrators/create-access-keys.html) and about [obtaining Prisma Cloud API Url](https://prisma.pan.dev/api/cloud/api-urls)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud/Data%20Connectors/PrismaCloud_API_FunctionApp.json","true" -"PaloAltoPrismaCloudAlertV2_CL","PaloAltoPrismaCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud","azuresentinel","azure-sentinel-solution-paloaltoprisma","2021-04-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PaloAltoPrismaCloudCSPMCCPDefinition","Microsoft","Palo Alto Prisma Cloud CSPM (via Codeless Connector Framework)","The Palo Alto Prisma Cloud CSPM data connector allows you to connect to your Palo Alto Prisma Cloud CSPM instance and ingesting Alerts (https://pan.dev/prisma-cloud/api/cspm/alerts/) & Audit Logs(https://pan.dev/prisma-cloud/api/cspm/audit-logs/) into Microsoft Sentinel.","[{""description"": ""To get more information on how to obtain the Prisma Cloud Access Key, Secret Key, and Base URL, please refer to the[connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud/Data%20Connectors/Readme.md), provide the required information below and click on Connect.\n>"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Prisma Cloud Access Key"", ""placeholder"": ""Enter Access Key"", ""type"": ""text"", ""name"": ""username""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Prisma Cloud Secret Key"", ""placeholder"": ""Enter Secret Key"", ""type"": ""password"", ""name"": ""password""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Prisma Cloud Base URL"", ""placeholder"": ""https://api2.eu.prismacloud.io"", ""type"": ""text"", ""name"": 
""baseurl""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""PaloAltoPrismaCloudCSPM Api Endpoints"", ""columnValue"": ""properties.request.apiEndpoint""}]}}], ""title"": ""Connect Palo Alto Prisma Cloud CSPM Events to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud/Data%20Connectors/PrismaCloudCSPMLog_CCF/PaloAltoPrismaCloudCSPMLog_ConnectorDefinition.json","true" -"PaloAltoPrismaCloudAuditV2_CL","PaloAltoPrismaCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud","azuresentinel","azure-sentinel-solution-paloaltoprisma","2021-04-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PaloAltoPrismaCloudCSPMCCPDefinition","Microsoft","Palo Alto Prisma Cloud CSPM (via Codeless Connector Framework)","The Palo Alto Prisma Cloud CSPM data connector allows you to connect to your Palo Alto Prisma Cloud CSPM instance and ingesting Alerts (https://pan.dev/prisma-cloud/api/cspm/alerts/) & Audit Logs(https://pan.dev/prisma-cloud/api/cspm/audit-logs/) into Microsoft 
Sentinel.","[{""description"": ""To get more information on how to obtain the Prisma Cloud Access Key, Secret Key, and Base URL, please refer to the[connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud/Data%20Connectors/Readme.md), provide the required information below and click on Connect.\n>"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Prisma Cloud Access Key"", ""placeholder"": ""Enter Access Key"", ""type"": ""text"", ""name"": ""username""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Prisma Cloud Secret Key"", ""placeholder"": ""Enter Secret Key"", ""type"": ""password"", ""name"": ""password""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Prisma Cloud Base URL"", ""placeholder"": ""https://api2.eu.prismacloud.io"", ""type"": ""text"", ""name"": ""baseurl""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""PaloAltoPrismaCloudCSPM Api Endpoints"", ""columnValue"": ""properties.request.apiEndpoint""}]}}], ""title"": ""Connect Palo Alto Prisma Cloud CSPM Events to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud/Data%20Connectors/PrismaCloudCSPMLog_CCF/PaloAltoPrismaCloudCSPMLog_ConnectorDefinition.json","true" -"","Pathlock_TDnR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Pathlock_TDnR","pathlockinc1631410274035","pathlock_tdnr","2022-02-17","","","Pathlock Inc.","Partner","https://pathlock.com/support/","","domains,verticals","","","","","","","false","","false" -"ABAPAuditLog","Pathlock_TDnR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Pathlock_TDnR","pathlockinc1631410274035","pathlock_tdnr","2022-02-17","","","Pathlock Inc.","Partner","https://pathlock.com/support/","","domains,verticals","Pathlock_TDnR","Pathlock Inc.","Pathlock Inc.: Threat Detection and Response for SAP","The [Pathlock Threat Detection and Response (TD&R)](https://pathlock.com/products/cybersecurity-application-controls/) integration with **Microsoft Sentinel Solution for SAP** delivers unified, real-time visibility into SAP security events, enabling organizations to detect and act on threats across all SAP landscapes. This out-of-the-box integration allows Security Operations Centers (SOCs) to correlate SAP-specific alerts with enterprise-wide telemetry, creating actionable intelligence that connects IT security with business processes.

Pathlock’s connector is purpose-built for SAP and forwards only **security-relevant events by default**, minimizing data volume and noise while maintaining the flexibility to forward all log sources when needed. Each event is enriched with **business process context**, allowing Microsoft Sentinel Solution for SAP analytics to distinguish operational patterns from real threats and to prioritize what truly matters.

This precision-driven approach helps security teams drastically reduce false positives, focus investigations, and accelerate **mean time to detect (MTTD)** and **mean time to respond (MTTR)**. Pathlock’s library consists of more than 1,500 SAP-specific detection signatures across 70+ log sources, the solution uncovers complex attack behaviors, configuration weaknesses, and access anomalies.

By combining business-context intelligence with advanced analytics, Pathlock enables enterprises to strengthen detection accuracy, streamline response actions, and maintain continuous control across their SAP environments—without adding complexity or redundant monitoring layers.","[{""title"": ""1. Create ARM Resources and Provide the Required Permissions"", ""description"": ""We will create data collection rule (DCR) and data collection endpoint (DCE) resources. We will also create a Microsoft Entra app registration and assign the required permissions to it."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated deployment of Azure resources\nClicking on \""Deploy push connector resources\"" will trigger the creation of DCR and DCE resources.\nIt will then create a Microsoft Entra app registration with client secret and grant permissions on the DCR. This setup enables data to be sent securely to the DCR using a OAuth v2 client credentials.""}}, {""parameters"": {""label"": ""Deploy push connector resources"", ""applicationDisplayName"": ""Pathlock Inc. Threat Detection and Response for SAP""}, ""type"": ""DeployPushConnectorButton_test""}]}, {""title"": ""2. 
Maintain the data collection endpoint details and authentication info in your central instance of Pathlock's Cybersecurity Application Controls: Threat Detection and Response"", ""description"": ""Share the data collection endpoint URL and authentication info with the Pathlock administrator to configure the plug and play forwarding in Threat Detection and Response to send data to the data collection endpoint.\nPlease do not hesitate to contact Pathlock if support is needed.\n\n"", ""instructions"": [{""parameters"": {""label"": ""Use this value to configure as Tenant ID in the LogIngestionAPI credential."", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra Application Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the Application Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Use this value to configure the LogsIngestionURL parameter when deploying the IFlow."", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the DCE URI""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""DCR Immutable ID"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the DCR ID""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rules. Typically requires Azure RBAC Owner or User Access Administrator role.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Pathlock_TDnR/Data%20Connectors/Pathlock_TDnR_PUSH_CCP/Pathlock_TDnR_connectorDefinition.json","true" -"Pathlock_TDnR_CL","Pathlock_TDnR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Pathlock_TDnR","pathlockinc1631410274035","pathlock_tdnr","2022-02-17","","","Pathlock Inc.","Partner","https://pathlock.com/support/","","domains,verticals","Pathlock_TDnR","Pathlock Inc.","Pathlock Threat Detection and Response Integration","Pathlock Threat Detection and Response enables seamless forwarding of security alerts and logs detected and collected by the Pathlock Platform into Microsoft Sentinel Solution for SAP.","[{""title"": ""1. Create ARM Resources and Provide the Required Permissions"", ""description"": ""We will create data collection rule (DCR) and data collection endpoint (DCE) resources. We will also create a Microsoft Entra app registration and assign the required permissions to it."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated deployment of Azure resources\nClicking on \""Deploy push connector resources\"" will trigger the creation of DCR and DCE resources.\nIt will then create a Microsoft Entra app registration with client secret and grant permissions on the DCR. 
This setup enables data to be sent securely to the DCR using a OAuth v2 client credentials.""}}, {""parameters"": {""label"": ""Deploy push connector resources"", ""applicationDisplayName"": ""Pathlock Threat Detection and Response forwarding to Microsoft Sentinel Solution for SAP""}, ""type"": ""DeployPushConnectorButton_test""}]}, {""title"": ""2. Maintain the data collection endpoint details and authentication info in Pathlock Threat Detection and Response"", ""description"": ""Share the data collection endpoint URL and authentication info with the Pathlock Threat Detection and Response Integration administrator to configure the Integration."", ""instructions"": [{""parameters"": {""label"": ""Use this value to configure as Tenant ID in the LogIngestionAPI credential."", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra Application Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the Application Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Use this value to configure the LogsIngestionURL parameter."", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the DCE URI""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""DCR Immutable ID"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the DCR ID""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": 
""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rules. Typically requires Azure RBAC Owner or User Access Administrator role.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Pathlock_TDnR/Data%20Connectors/Pathlock_TDnR.json","true" -"","Perimeter 81","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Perimeter%2081","perimeter811605117499319","perimeter_81___mss","2022-05-06","","","Perimeter 81","Partner","https://support.perimeter81.com/docs","","domains","","","","","","","false","","false" -"Perimeter81_CL","Perimeter 81","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Perimeter%2081","perimeter811605117499319","perimeter_81___mss","2022-05-06","","","Perimeter 81","Partner","https://support.perimeter81.com/docs","","domains","Perimeter81ActivityLogs","Perimeter 81","Perimeter 81 Activity Logs","The Perimeter 81 Activity Logs connector allows you to easily connect your Perimeter 81 activity logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation.","[{""title"": """", ""description"": ""Please note the values below and follow the instructions here to connect your Perimeter 81 activity logs with Microsoft Sentinel."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": 
""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Perimeter%2081/Data%20Connectors/Perimeter81ActivityLogs.json","true" -"","Phosphorus","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Phosphorus","4043","microsoft-sentinel-solution-phosphorus","2024-08-13","2024-08-13","","Phosphorus Inc.","Partner","https://phosphorus.io","","domains","","","","","","","false","","false" -"Phosphorus_CL","Phosphorus","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Phosphorus","4043","microsoft-sentinel-solution-phosphorus","2024-08-13","2024-08-13","","Phosphorus Inc.","Partner","https://phosphorus.io","","domains","Phosphorus_Polling","Phosphorus Inc.","Phosphorus Devices","The Phosphorus Device Connector provides the capability to Phosphorus to ingest device data logs into Microsoft Sentinel through the Phosphorus REST API. The Connector provides visibility into the devices enrolled in Phosphorus. This Data Connector pulls devices information along with its corresponding alerts.","[{""description"": ""**STEP 1 - Configuration steps for the Phosphorus API**\n\n Follow these instructions to create a Phosphorus API key.\n 1. Log into your Phosphorus instance\n 2. 
Navigate to Settings -> API \n 3. If the API key has not already been created, press the **Add button** to create the API key\n 4. The API key can now be copied and used during the Phosphorus Device connector configuration""}, {""title"": ""Connect the Phosphorus Application with Microsoft Sentinel"", ""description"": ""**STEP 2 - Fill in the details below**\n\n>**IMPORTANT:** Before deploying the Phosphorus Device data connector, have the Phosphorus Instance Domain Name readily available as well as the Phosphorus API Key(s)"", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Domain Name"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{urlPlaceHolder}}"", ""placeHolderValue"": """"}, {""displayText"": ""Integration Name"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{integrationName}}"", ""placeHolderValue"": """"}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true, ""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""REST API Credentials/permissions"", ""description"": ""**Phosphorus API Key** is required. Please make sure that the API Key associated with the User has the Manage Settings permissions enabled.\n\n Follow these instructions to enable Manage Settings permissions.\n 1. Log in to the Phosphorus Application\n 2. Go to 'Settings' -> 'Groups'\n 3. Select the Group the Integration user is a part of\n 4. Navigate to 'Product Actions' -> toggle on the 'Manage Settings' permission. 
""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Phosphorus/Data%20Connectors/PhosphorusDataConnector.json","true" -"","PingFederate","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PingFederate","azuresentinel","azure-sentinel-solution-pingfederate","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"CommonSecurityLog","PingFederate","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PingFederate","azuresentinel","azure-sentinel-solution-pingfederate","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PingFederate","Ping Identity","[Deprecated] PingFederate via Legacy Agent","The [PingFederate](https://www.pingidentity.com/en/software/pingfederate.html) data connector provides the capability to ingest [PingFederate events](https://docs.pingidentity.com/bundle/pingfederate-102/page/lly1564002980532.html) into Microsoft Sentinel. Refer to [PingFederate documentation](https://docs.pingidentity.com/bundle/pingfederate-102/page/tle1564002955874.html) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**PingFederateEvent**](https://aka.ms/sentinel-PingFederate-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""[Follow these steps](https://docs.pingidentity.com/bundle/pingfederate-102/page/gsn1564002980953.html) to configure PingFederate sending audit log via syslog in CEF format.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PingFederate/Data%20Connectors/Connector_CEF_PingFederate.json","true" -"CommonSecurityLog","PingFederate","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PingFederate","azuresentinel","azure-sentinel-solution-pingfederate","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PingFederateAma","Ping Identity","[Deprecated] PingFederate via AMA","The [PingFederate](https://www.pingidentity.com/en/software/pingfederate.html) data connector provides the capability to ingest [PingFederate events](https://docs.pingidentity.com/bundle/pingfederate-102/page/lly1564002980532.html) into Microsoft Sentinel. Refer to [PingFederate documentation](https://docs.pingidentity.com/bundle/pingfederate-102/page/tle1564002955874.html) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**PingFederateEvent**](https://aka.ms/sentinel-PingFederate-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""[Follow these steps](https://docs.pingidentity.com/bundle/pingfederate-102/page/gsn1564002980953.html) to configure PingFederate sending audit log via syslog in CEF format."", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PingFederate/Data%20Connectors/template_PingFederateAMA.json","true" -"","PingOne","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PingOne","azuresentinel","azure-sentinel-pingone","2025-04-20","2025-04-20","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"PingOne_AuditActivitiesV2_CL","PingOne","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PingOne","azuresentinel","azure-sentinel-pingone","2025-04-20","2025-04-20","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PingOneAuditLogsCCPDefinition","Microsoft","Ping One (via Codeless Connector Framework)","This connector ingests **audit activity logs** from the PingOne Identity platform into Microsoft Sentinel using a Codeless Connector Framework.","[{""title"": ""Connect Ping One connector to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""Before connecting to PingOne, ensure the following prerequisites are completed. Refer to the [document](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PingOne/README.md) for detailed setup instructions, including how to obtain client credentials and the environment ID.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Client Credentials \n You'll need client credentials, including your client id and client secret.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Environment Id \n To generate token and gather logs from audit activities endpoint""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Environment ID"", ""columnValue"": ""properties.addOnAttributes.EnvironmentId""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add domain"", ""title"": ""Add domain"", ""subtitle"": ""Add domain"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Client ID"", ""placeholder"": ""Enter ID of the client"", ""type"": ""text"", ""name"": ""clientId"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client Secret"", ""placeholder"": ""Enter your secret key"", ""type"": ""password"", ""name"": ""clientSecret"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Environment ID"", ""placeholder"": ""Enter your environment Id "", ""type"": ""text"", ""name"": ""environmentId"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Api domain"", ""placeholder"": ""Enter your Api domain Eg.( pingone.com,pingone.eu etc )depending on the region credentials created for "", ""type"": ""text"", ""name"": ""apidomain"", ""required"": true}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": false, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PingOne/Data%20Connectors/PingOneAuditLogs_ccp/PingOneAuditLogs_DataConnectorDefinition.json","true" 
-"","PostgreSQL","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PostgreSQL","azuresentinel","azure-sentinel-solution-postgresql","2022-06-27","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"PostgreSQL_CL","PostgreSQL","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PostgreSQL","azuresentinel","azure-sentinel-solution-postgresql","2022-06-27","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PostgreSQL","PostgreSQL","[Deprecated] PostgreSQL Events","PostgreSQL data connector provides the capability to ingest [PostgreSQL](https://www.postgresql.org/) events into Microsoft Sentinel. Refer to [PostgreSQL documentation](https://www.postgresql.org/docs/current/index.html) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on PostgreSQL parser based on a Kusto Function to work as expected. This parser is installed along with solution installation."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Tomcat Server where the logs are generated.\n\n> Logs from PostgreSQL Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure PostgreSQL to write logs to files"", ""description"": ""1. Edit postgresql.conf file to write logs to files:\n\n>**log_destination** = 'stderr'\n\n>**logging_collector** = on\n\nSet the following parameters: **log_directory** and **log_filename**. 
Refer to the [PostgreSQL documentation for more details](https://www.postgresql.org/docs/current/runtime-config-logging.html)""}, {""title"": ""3. Configure the logs to be collected"", ""description"": ""Configure the custom log directory to be collected"", ""instructions"": [{""parameters"": {""linkType"": ""OpenCustomLogsSettings""}, ""type"": ""InstallAgent""}]}, {""title"": """", ""description"": ""1. Select the link above to open your workspace advanced settings \n2. From the left pane, select **Settings**, select **Custom Logs** and click **+Add custom log**\n3. Click **Browse** to upload a sample of a PostgreSQL log file. Then, click **Next >**\n4. Select **Timestamp** as the record delimiter and click **Next >**\n5. Select **Windows** or **Linux** and enter the path to PostgreSQL logs based on your configuration(e.g. for some Linux distros the default path is /var/log/postgresql/) \n6. After entering the path, click the '+' symbol to apply, then click **Next >** \n7. Add **PostgreSQL** as the custom log Name (the '_CL' suffix will be added automatically) and click **Done**.""}, {""title"": ""Validate connectivity"", ""description"": ""It may take upwards of 20 minutes until your logs start to appear in Microsoft Sentinel.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PostgreSQL/Data%20Connectors/Connector_PostgreSQL.json","true" -"","Power Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Power%20Platform","","","","","","","","","","","","","","","","","false","","false" -"","Prancer PenSuiteAI Integration","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Prancer%20PenSuiteAI%20Integration","prancerenterprise1600813133757","microsoft-sentinel-solution-prancer","2023-08-02","","","Prancer PenSuiteAI Integration","Partner","https://www.prancer.io","","domains","","","","","","","false","","false" -"prancer_CL","Prancer PenSuiteAI Integration","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Prancer%20PenSuiteAI%20Integration","prancerenterprise1600813133757","microsoft-sentinel-solution-prancer","2023-08-02","","","Prancer PenSuiteAI Integration","Partner","https://www.prancer.io","","domains","PrancerLogData","Prancer","Prancer Data Connector","The Prancer Data Connector provides the capability to ingest Prancer [CSPM](https://docs.prancer.io/web/CSPM/) and [PAC](https://docs.prancer.io/web/PAC/introduction/) data to process through Microsoft Sentinel. Refer to [Prancer Documentation](https://docs.prancer.io/web) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Prancer REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""STEP 1: Follow the documentation on the [Prancer Documentation Site](https://docs.prancer.io/web/) in order to set up a scan with an Azure cloud connector.""}, {""title"": """", ""description"": ""STEP 2: Once the scan is created go to the 'Third Party Integrations' menu for the scan and select Sentinel.""}, {""title"": """", ""description"": ""STEP 3: Follow the configuration wizard to select where in Azure the results should be sent to.""}, {""title"": """", ""description"": ""STEP 4: Data should start to get fed into Microsoft Sentinel for processing.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Include custom pre-requisites if the connectivity requires - else delete customs"", ""description"": ""Description for any custom pre-requisite""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Prancer%20PenSuiteAI%20Integration/Data%20Connectors/PrancerLogData.json","true" -"","ProofPointTap","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap","proofpointinc1600438591120","azure-sentinel-proofpoint","2022-05-23","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","","","","","","","false","","false" -"ProofPointTAPClicksBlocked_CL","ProofPointTap","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap","proofpointinc1600438591120","azure-sentinel-proofpoint","2022-05-23","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointTAP","Proofpoint","[Deprecated] Proofpoint TAP","The [Proofpoint Targeted Attack Protection (TAP)](https://www.proofpoint.com/us/products/advanced-threat-protection/targeted-attack-protection) connector provides the capability to ingest Proofpoint TAP logs and events into Microsoft Sentinel. The connector provides visibility into Message and Click events in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Proofpoint TAP to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Proofpoint TAP API**\n\n1. Log into the Proofpoint TAP console \n2. Navigate to **Connect Applications** and select **Service Principal**\n3. Create a **Service Principal** (API Authorization Key)""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Proofpoint TAP connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Proofpoint TAP API Authorization Key(s), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Proofpoint TAP connector.\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelproofpointtapazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelproofpointtapazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password**, and validate the **Uri**.\n> - The default URI is pulling data for the last 300 seconds (5 minutes) to correspond with the default Function App Timer trigger of 5 minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. \n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""This method provides the step-by-step instructions to deploy the Proofpoint TAP connector manually with Azure Function (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentinelproofpointtapazurefunctionzip) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following six (6) application settings individually, with their respective string values (case-sensitive): \n\t\tapiUsername\n\t\tapipassword\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\tlogAnalyticsUri (optional)\n> - Set the `uri` value to: `https://tap-api-v2.proofpoint.com/v2/siem/all?format=json&sinceSeconds=300`\n> - The default URI is pulling data for the last 300 seconds (5 minutes) to correspond with the default Function App Timer trigger of 5 minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly to prevent overlapping data ingestion.\n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`\n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Proofpoint TAP API Key"", ""description"": ""A Proofpoint TAP API username and password is required. 
[See the documentation to learn more about Proofpoint SIEM API](https://help.proofpoint.com/Threat_Insight_Dashboard/API_Documentation/SIEM_API).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap/Data%20Connectors/ProofpointTAP_API_FunctionApp.json","true" -"ProofPointTAPClicksPermitted_CL","ProofPointTap","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap","proofpointinc1600438591120","azure-sentinel-proofpoint","2022-05-23","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointTAP","Proofpoint","[Deprecated] Proofpoint TAP","The [Proofpoint Targeted Attack Protection (TAP)](https://www.proofpoint.com/us/products/advanced-threat-protection/targeted-attack-protection) connector provides the capability to ingest Proofpoint TAP logs and events into Microsoft Sentinel. The connector provides visibility into Message and Click events in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.

NOTE: This data connector has been deprecated; consider moving to the CCP data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Proofpoint TAP to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Proofpoint TAP API**\n\n1. Log into the Proofpoint TAP console \n2. Navigate to **Connect Applications** and select **Service Principal**\n3. Create a **Service Principal** (API Authorization Key)""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Proofpoint TAP connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Proofpoint TAP API Authorization Key(s), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Proofpoint TAP connector.\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelproofpointtapazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelproofpointtapazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password**, and validate the **Uri**.\n> - The default URI is pulling data for the last 300 seconds (5 minutes) to correspond with the default Function App Timer trigger of 5 minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. \n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""This method provides the step-by-step instructions to deploy the Proofpoint TAP connector manually with Azure Function (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentinelproofpointtapazurefunctionzip) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following six (6) application settings individually, with their respective string values (case-sensitive): \n\t\tapiUsername\n\t\tapipassword\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\tlogAnalyticsUri (optional)\n> - Set the `uri` value to: `https://tap-api-v2.proofpoint.com/v2/siem/all?format=json&sinceSeconds=300`\n> - The default URI is pulling data for the last 300 seconds (5 minutes) to correspond with the default Function App Timer trigger of 5 minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly to prevent overlapping data ingestion.\n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`\n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Proofpoint TAP API Key"", ""description"": ""A Proofpoint TAP API username and password is required. 
[See the documentation to learn more about Proofpoint SIEM API](https://help.proofpoint.com/Threat_Insight_Dashboard/API_Documentation/SIEM_API).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap/Data%20Connectors/ProofpointTAP_API_FunctionApp.json","true" -"ProofPointTAPMessagesBlocked_CL","ProofPointTap","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap","proofpointinc1600438591120","azure-sentinel-proofpoint","2022-05-23","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointTAP","Proofpoint","[Deprecated] Proofpoint TAP","The [Proofpoint Targeted Attack Protection (TAP)](https://www.proofpoint.com/us/products/advanced-threat-protection/targeted-attack-protection) connector provides the capability to ingest Proofpoint TAP logs and events into Microsoft Sentinel. The connector provides visibility into Message and Click events in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.

NOTE: This data connector has been deprecated; consider moving to the CCP data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Proofpoint TAP to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Proofpoint TAP API**\n\n1. Log into the Proofpoint TAP console \n2. Navigate to **Connect Applications** and select **Service Principal**\n3. Create a **Service Principal** (API Authorization Key)""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Proofpoint TAP connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Proofpoint TAP API Authorization Key(s), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Proofpoint TAP connector.\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelproofpointtapazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelproofpointtapazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password**, and validate the **Uri**.\n> - The default URI is pulling data for the last 300 seconds (5 minutes) to correspond with the default Function App Timer trigger of 5 minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. \n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""This method provides the step-by-step instructions to deploy the Proofpoint TAP connector manually with Azure Function (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentinelproofpointtapazurefunctionzip) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following six (6) application settings individually, with their respective string values (case-sensitive): \n\t\tapiUsername\n\t\tapipassword\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\tlogAnalyticsUri (optional)\n> - Set the `uri` value to: `https://tap-api-v2.proofpoint.com/v2/siem/all?format=json&sinceSeconds=300`\n> - The default URI is pulling data for the last 300 seconds (5 minutes) to correspond with the default Function App Timer trigger of 5 minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly to prevent overlapping data ingestion.\n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`\n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Proofpoint TAP API Key"", ""description"": ""A Proofpoint TAP API username and password is required. 
[See the documentation to learn more about Proofpoint SIEM API](https://help.proofpoint.com/Threat_Insight_Dashboard/API_Documentation/SIEM_API).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap/Data%20Connectors/ProofpointTAP_API_FunctionApp.json","true" -"ProofPointTAPMessagesDelivered_CL","ProofPointTap","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap","proofpointinc1600438591120","azure-sentinel-proofpoint","2022-05-23","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointTAP","Proofpoint","[Deprecated] Proofpoint TAP","The [Proofpoint Targeted Attack Protection (TAP)](https://www.proofpoint.com/us/products/advanced-threat-protection/targeted-attack-protection) connector provides the capability to ingest Proofpoint TAP logs and events into Microsoft Sentinel. The connector provides visibility into Message and Click events in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.

NOTE: This data connector has been deprecated; consider moving to the CCP data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Proofpoint TAP to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Proofpoint TAP API**\n\n1. Log into the Proofpoint TAP console \n2. Navigate to **Connect Applications** and select **Service Principal**\n3. Create a **Service Principal** (API Authorization Key)""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Proofpoint TAP connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Proofpoint TAP API Authorization Key(s), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Proofpoint TAP connector.\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelproofpointtapazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelproofpointtapazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password**, and validate the **Uri**.\n> - The default URI is pulling data for the last 300 seconds (5 minutes) to correspond with the default Function App Timer trigger of 5 minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. \n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""This method provides the step-by-step instructions to deploy the Proofpoint TAP connector manually with Azure Function (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentinelproofpointtapazurefunctionzip) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following six (6) application settings individually, with their respective string values (case-sensitive): \n\t\tapiUsername\n\t\tapipassword\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\tlogAnalyticsUri (optional)\n> - Set the `uri` value to: `https://tap-api-v2.proofpoint.com/v2/siem/all?format=json&sinceSeconds=300`\n> - The default URI is pulling data for the last 300 seconds (5 minutes) to correspond with the default Function App Timer trigger of 5 minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly to prevent overlapping data ingestion.\n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`\n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Proofpoint TAP API Key"", ""description"": ""A Proofpoint TAP API username and password is required. 
[See the documentation to learn more about Proofpoint SIEM API](https://help.proofpoint.com/Threat_Insight_Dashboard/API_Documentation/SIEM_API).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap/Data%20Connectors/ProofpointTAP_API_FunctionApp.json","true" -"ProofPointTAPClicksBlockedV2_CL","ProofPointTap","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap","proofpointinc1600438591120","azure-sentinel-proofpoint","2022-05-23","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointTAPv2","Proofpoint","Proofpoint TAP (via Codeless Connector Platform)","The [Proofpoint Targeted Attack Protection (TAP)](https://www.proofpoint.com/us/products/advanced-threat-protection/targeted-attack-protection) connector provides the capability to ingest Proofpoint TAP logs and events into Microsoft Sentinel. The connector provides visibility into Message and Click events in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","[{""description"": ""**Configuration steps for the Proofpoint TAP API**\n\n1. Log into the [Proofpoint TAP dashboard](https://threatinsight.proofpoint.com/) \n2. Navigate to **Settings** and go to **Connected Applications** tab \n 3. Click on **Create New Credential** \n 4. Provide a name and click **Generate** \n 5. 
Copy **Service Principal** and **Secret** values""}, {""description"": "">**NOTE:** This connector depends on a parser based on Kusto Function to work as expected [**ProofpointTAPEvent**](https://aka.ms/sentinel-ProofpointTAPDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Service Principal"", ""placeholder"": ""123456"", ""type"": ""text"", ""name"": ""username""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Secret"", ""placeholder"": ""123456"", ""type"": ""password"", ""name"": ""password""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""disconnectLabel"": ""Disconnect"", ""name"": ""connectionToggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Proofpoint TAP API Key"", ""description"": ""A Proofpoint TAP API service principal and secret is required to access Proofpoint's SIEM API. 
[See the documentation to learn more about Proofpoint SIEM API](https://help.proofpoint.com/Threat_Insight_Dashboard/API_Documentation/SIEM_API).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap/Data%20Connectors/ProofpointTAP_CCP/ProofpointTAP_defination.json","true" -"ProofPointTAPClicksPermittedV2_CL","ProofPointTap","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap","proofpointinc1600438591120","azure-sentinel-proofpoint","2022-05-23","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointTAPv2","Proofpoint","Proofpoint TAP (via Codeless Connector Platform)","The [Proofpoint Targeted Attack Protection (TAP)](https://www.proofpoint.com/us/products/advanced-threat-protection/targeted-attack-protection) connector provides the capability to ingest Proofpoint TAP logs and events into Microsoft Sentinel. The connector provides visibility into Message and Click events in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","[{""description"": ""**Configuration steps for the Proofpoint TAP API**\n\n1. Log into the [Proofpoint TAP dashboard](https://threatinsight.proofpoint.com/) \n2. Navigate to **Settings** and go to **Connected Applications** tab \n 3. Click on **Create New Credential** \n 4. Provide a name and click **Generate** \n 5. 
Copy **Service Principal** and **Secret** values""}, {""description"": "">**NOTE:** This connector depends on a parser based on Kusto Function to work as expected [**ProofpointTAPEvent**](https://aka.ms/sentinel-ProofpointTAPDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Service Principal"", ""placeholder"": ""123456"", ""type"": ""text"", ""name"": ""username""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Secret"", ""placeholder"": ""123456"", ""type"": ""password"", ""name"": ""password""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""disconnectLabel"": ""Disconnect"", ""name"": ""connectionToggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Proofpoint TAP API Key"", ""description"": ""A Proofpoint TAP API service principal and secret is required to access Proofpoint's SIEM API. 
[See the documentation to learn more about Proofpoint SIEM API](https://help.proofpoint.com/Threat_Insight_Dashboard/API_Documentation/SIEM_API).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap/Data%20Connectors/ProofpointTAP_CCP/ProofpointTAP_defination.json","true" -"ProofPointTAPMessagesBlockedV2_CL","ProofPointTap","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap","proofpointinc1600438591120","azure-sentinel-proofpoint","2022-05-23","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointTAPv2","Proofpoint","Proofpoint TAP (via Codeless Connector Platform)","The [Proofpoint Targeted Attack Protection (TAP)](https://www.proofpoint.com/us/products/advanced-threat-protection/targeted-attack-protection) connector provides the capability to ingest Proofpoint TAP logs and events into Microsoft Sentinel. The connector provides visibility into Message and Click events in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","[{""description"": ""**Configuration steps for the Proofpoint TAP API**\n\n1. Log into the [Proofpoint TAP dashboard](https://threatinsight.proofpoint.com/) \n2. Navigate to **Settings** and go to **Connected Applications** tab \n 3. Click on **Create New Credential** \n 4. Provide a name and click **Generate** \n 5. 
Copy **Service Principal** and **Secret** values""}, {""description"": "">**NOTE:** This connector depends on a parser based on Kusto Function to work as expected [**ProofpointTAPEvent**](https://aka.ms/sentinel-ProofpointTAPDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Service Principal"", ""placeholder"": ""123456"", ""type"": ""text"", ""name"": ""username""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Secret"", ""placeholder"": ""123456"", ""type"": ""password"", ""name"": ""password""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""disconnectLabel"": ""Disconnect"", ""name"": ""connectionToggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Proofpoint TAP API Key"", ""description"": ""A Proofpoint TAP API service principal and secret is required to access Proofpoint's SIEM API. 
[See the documentation to learn more about Proofpoint SIEM API](https://help.proofpoint.com/Threat_Insight_Dashboard/API_Documentation/SIEM_API).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap/Data%20Connectors/ProofpointTAP_CCP/ProofpointTAP_defination.json","true" -"ProofPointTAPMessagesDeliveredV2_CL","ProofPointTap","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap","proofpointinc1600438591120","azure-sentinel-proofpoint","2022-05-23","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointTAPv2","Proofpoint","Proofpoint TAP (via Codeless Connector Platform)","The [Proofpoint Targeted Attack Protection (TAP)](https://www.proofpoint.com/us/products/advanced-threat-protection/targeted-attack-protection) connector provides the capability to ingest Proofpoint TAP logs and events into Microsoft Sentinel. The connector provides visibility into Message and Click events in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","[{""description"": ""**Configuration steps for the Proofpoint TAP API**\n\n1. Log into the [Proofpoint TAP dashboard](https://threatinsight.proofpoint.com/) \n2. Navigate to **Settings** and go to **Connected Applications** tab \n 3. Click on **Create New Credential** \n 4. Provide a name and click **Generate** \n 5. 
Copy **Service Principal** and **Secret** values""}, {""description"": "">**NOTE:** This connector depends on a parser based on Kusto Function to work as expected [**ProofpointTAPEvent**](https://aka.ms/sentinel-ProofpointTAPDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Service Principal"", ""placeholder"": ""123456"", ""type"": ""text"", ""name"": ""username""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Secret"", ""placeholder"": ""123456"", ""type"": ""password"", ""name"": ""password""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""disconnectLabel"": ""Disconnect"", ""name"": ""connectionToggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Proofpoint TAP API Key"", ""description"": ""A Proofpoint TAP API service principal and secret is required to access Proofpoint's SIEM API. 
[See the documentation to learn more about Proofpoint SIEM API](https://help.proofpoint.com/Threat_Insight_Dashboard/API_Documentation/SIEM_API).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap/Data%20Connectors/ProofpointTAP_CCP/ProofpointTAP_defination.json","true" -"","Proofpoint On demand(POD) Email Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security","proofpointinc1600438591120","azure-sentinel-proofpointpod","2021-03-31","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","","","","","","","false","","false" -"ProofpointPODMailLog_CL","Proofpoint On demand(POD) Email Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security","proofpointinc1600438591120","azure-sentinel-proofpointpod","2021-03-31","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointCCPDefinition","Proofpoint","Proofpoint On Demand Email Security (via Codeless Connector Platform)","Proofpoint On Demand Email Security data connector provides the capability to get Proofpoint on Demand Email Protection data, allows users to check message traceability, monitoring into email activity, threats,and data exfiltration by attackers and malicious insiders. The connector provides ability to review events in your org on an accelerated basis, get event log files in hourly increments for recent activity.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Proofpoint POD Websocket API \n #### The PoD Log API does not allow use of the same token for more than one session at the same time, so make sure your token isn't used anywhere. \n Proofpoint Websocket API service requires Remote Syslog Forwarding license. 
Please refer the [documentation](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API) on how to enable and check PoD Log API. \n You must provide your cluster id and security token.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve the cluster id\n 1.1. Log in to the [proofpoint](https://admin.proofpoint.com/) [**Management Console**] with Admin user credentials\n\n 1.2. In the **Management Console**, the cluster id is displayed in the upper-right corner.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve the API token\n 2.1. Log in to the [proofpoint](https://admin.proofpoint.com/) [**Management Console**] with Admin user credentials\n\n 2.2. In the **Management Console**, click **Settings** -> **API Key Management** \n\n 2.3. Under **API Key Management** click on the **PoD Logging** tab.\n\n 2.4. Get or create a new API key.""}}, {""parameters"": {""label"": ""Cluster Id"", ""placeholder"": ""cluster_id"", ""type"": ""text"", ""name"": ""clusterId""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Key"", ""placeholder"": ""API Key"", ""type"": ""text"", ""name"": ""apiKey""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}], ""customs"": [{""name"": ""Websocket API Credentials/permissions"", ""description"": ""**ProofpointClusterID**, and **ProofpointToken** are required. 
[See the documentation to learn more about API](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security/Data%20Connectors/ProofPointEmailSecurity_CCP/ProofpointPOD_Definaton.json","true" -"ProofpointPODMessage_CL","Proofpoint On demand(POD) Email Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security","proofpointinc1600438591120","azure-sentinel-proofpointpod","2021-03-31","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointCCPDefinition","Proofpoint","Proofpoint On Demand Email Security (via Codeless Connector Platform)","Proofpoint On Demand Email Security data connector provides the capability to get Proofpoint on Demand Email Protection data, allows users to check message traceability, monitoring into email activity, threats,and data exfiltration by attackers and malicious insiders. The connector provides ability to review events in your org on an accelerated basis, get event log files in hourly increments for recent activity.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Proofpoint POD Websocket API \n #### The PoD Log API does not allow use of the same token for more than one session at the same time, so make sure your token isn't used anywhere. \n Proofpoint Websocket API service requires Remote Syslog Forwarding license. Please refer the [documentation](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API) on how to enable and check PoD Log API. \n You must provide your cluster id and security token.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve the cluster id\n 1.1. 
Log in to the [proofpoint](https://admin.proofpoint.com/) [**Management Console**] with Admin user credentials\n\n 1.2. In the **Management Console**, the cluster id is displayed in the upper-right corner.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve the API token\n 2.1. Log in to the [proofpoint](https://admin.proofpoint.com/) [**Management Console**] with Admin user credentials\n\n 2.2. In the **Management Console**, click **Settings** -> **API Key Management** \n\n 2.3. Under **API Key Management** click on the **PoD Logging** tab.\n\n 2.4. Get or create a new API key.""}}, {""parameters"": {""label"": ""Cluster Id"", ""placeholder"": ""cluster_id"", ""type"": ""text"", ""name"": ""clusterId""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Key"", ""placeholder"": ""API Key"", ""type"": ""text"", ""name"": ""apiKey""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}], ""customs"": [{""name"": ""Websocket API Credentials/permissions"", ""description"": ""**ProofpointClusterID**, and **ProofpointToken** are required. 
[See the documentation to learn more about API](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security/Data%20Connectors/ProofPointEmailSecurity_CCP/ProofpointPOD_Definaton.json","true" -"ProofpointPODMailLog_CL","Proofpoint On demand(POD) Email Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security","proofpointinc1600438591120","azure-sentinel-proofpointpod","2021-03-31","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointPOD","Proofpoint","[Deprecated] Proofpoint On Demand Email Security","Proofpoint On Demand Email Security data connector provides the capability to get Proofpoint on Demand Email Protection data, allows users to check message traceability, monitoring into email activity, threats,and data exfiltration by attackers and malicious insiders. The connector provides ability to review events in your org on an accelerated basis, get event log files in hourly increments for recent activity.

NOTE: This data connector has been deprecated; consider moving to the CCP data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Proofpoint Websocket API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-proofpointpod-parser) to create the Kusto functions alias, **ProofpointPOD**""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Proofpoint Websocket API**\n\n1. Proofpoint Websocket API service requires Remote Syslog Forwarding license. Please refer the [documentation](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API) on how to enable and check PoD Log API. \n2. 
You must provide your cluster id and security token.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Proofpoint On Demand Email Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Proofpoint POD Log API credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Proofpoint On Demand Email Security data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-proofpointpod-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ProofpointClusterID**, **ProofpointToken** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Proofpoint On Demand Email Security data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> NOTE:You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-proofpointpod-functionapp) file. 
Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ProofpointXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tProofpointClusterID\n\t\tProofpointToken\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Websocket API Credentials/permissions"", ""description"": ""**ProofpointClusterID**, **ProofpointToken** is required. 
[See the documentation to learn more about API](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security/Data%20Connectors/ProofpointPOD_API_FunctionApp.json","true" -"ProofpointPODMessage_CL","Proofpoint On demand(POD) Email Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security","proofpointinc1600438591120","azure-sentinel-proofpointpod","2021-03-31","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointPOD","Proofpoint","[Deprecated] Proofpoint On Demand Email Security","Proofpoint On Demand Email Security data connector provides the capability to get Proofpoint on Demand Email Protection data, allows users to check message traceability, monitoring into email activity, threats,and data exfiltration by attackers and malicious insiders. The connector provides ability to review events in your org on an accelerated basis, get event log files in hourly increments for recent activity.

NOTE: This data connector has been deprecated; consider moving to the CCP data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Proofpoint Websocket API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-proofpointpod-parser) to create the Kusto functions alias, **ProofpointPOD**""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Proofpoint Websocket API**\n\n1. Proofpoint Websocket API service requires Remote Syslog Forwarding license. Please refer the [documentation](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API) on how to enable and check PoD Log API. \n2. 
You must provide your cluster id and security token.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Proofpoint On Demand Email Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Proofpoint POD Log API credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Proofpoint On Demand Email Security data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-proofpointpod-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ProofpointClusterID**, **ProofpointToken** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Proofpoint On Demand Email Security data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> NOTE:You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-proofpointpod-functionapp) file. 
Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ProofpointXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tProofpointClusterID\n\t\tProofpointToken\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Websocket API Credentials/permissions"", ""description"": ""**ProofpointClusterID**, **ProofpointToken** is required. 
[See the documentation to learn more about API](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security/Data%20Connectors/ProofpointPOD_API_FunctionApp.json","true" -"ProofpointPOD_maillog_CL","Proofpoint On demand(POD) Email Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security","proofpointinc1600438591120","azure-sentinel-proofpointpod","2021-03-31","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointPOD","Proofpoint","[Deprecated] Proofpoint On Demand Email Security","Proofpoint On Demand Email Security data connector provides the capability to get Proofpoint on Demand Email Protection data, allows users to check message traceability, monitoring into email activity, threats,and data exfiltration by attackers and malicious insiders. The connector provides ability to review events in your org on an accelerated basis, get event log files in hourly increments for recent activity.

NOTE: This data connector has been deprecated; consider moving to the CCP data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Proofpoint Websocket API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-proofpointpod-parser) to create the Kusto functions alias, **ProofpointPOD**""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Proofpoint Websocket API**\n\n1. Proofpoint Websocket API service requires Remote Syslog Forwarding license. Please refer the [documentation](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API) on how to enable and check PoD Log API. \n2. 
You must provide your cluster id and security token.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Proofpoint On Demand Email Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Proofpoint POD Log API credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Proofpoint On Demand Email Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-proofpointpod-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ProofpointClusterID**, **ProofpointToken** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Proofpoint On Demand Email Security data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> NOTE: You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-proofpointpod-functionapp) file. 
Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ProofpointXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tProofpointClusterID\n\t\tProofpointToken\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Websocket API Credentials/permissions"", ""description"": ""**ProofpointClusterID**, **ProofpointToken** is required. 
[See the documentation to learn more about API](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security/Data%20Connectors/ProofpointPOD_API_FunctionApp.json","true" -"ProofpointPOD_message_CL","Proofpoint On demand(POD) Email Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security","proofpointinc1600438591120","azure-sentinel-proofpointpod","2021-03-31","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointPOD","Proofpoint","[Deprecated] Proofpoint On Demand Email Security","Proofpoint On Demand Email Security data connector provides the capability to get Proofpoint on Demand Email Protection data, allows users to check message traceability, monitoring into email activity, threats,and data exfiltration by attackers and malicious insiders. The connector provides ability to review events in your org on an accelerated basis, get event log files in hourly increments for recent activity.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Proofpoint Websocket API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-proofpointpod-parser) to create the Kusto functions alias, **ProofpointPOD**""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Proofpoint Websocket API**\n\n1. Proofpoint Websocket API service requires Remote Syslog Forwarding license. Please refer the [documentation](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API) on how to enable and check PoD Log API. \n2. 
You must provide your cluster id and security token.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Proofpoint On Demand Email Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Proofpoint POD Log API credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Proofpoint On Demand Email Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-proofpointpod-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ProofpointClusterID**, **ProofpointToken** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Proofpoint On Demand Email Security data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> NOTE: You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-proofpointpod-functionapp) file. 
Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ProofpointXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tProofpointClusterID\n\t\tProofpointToken\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Websocket API Credentials/permissions"", ""description"": ""**ProofpointClusterID**, **ProofpointToken** is required. 
[See the documentation to learn more about API](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security/Data%20Connectors/ProofpointPOD_API_FunctionApp.json","true" -"maillog_CL","Proofpoint On demand(POD) Email Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security","proofpointinc1600438591120","azure-sentinel-proofpointpod","2021-03-31","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointPOD","Proofpoint","[Deprecated] Proofpoint On Demand Email Security","Proofpoint On Demand Email Security data connector provides the capability to get Proofpoint on Demand Email Protection data, allows users to check message traceability, monitoring into email activity, threats,and data exfiltration by attackers and malicious insiders. The connector provides ability to review events in your org on an accelerated basis, get event log files in hourly increments for recent activity.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Proofpoint Websocket API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-proofpointpod-parser) to create the Kusto functions alias, **ProofpointPOD**""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Proofpoint Websocket API**\n\n1. Proofpoint Websocket API service requires Remote Syslog Forwarding license. Please refer the [documentation](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API) on how to enable and check PoD Log API. \n2. 
You must provide your cluster id and security token.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Proofpoint On Demand Email Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Proofpoint POD Log API credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Proofpoint On Demand Email Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-proofpointpod-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ProofpointClusterID**, **ProofpointToken** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Proofpoint On Demand Email Security data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> NOTE: You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-proofpointpod-functionapp) file. 
Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ProofpointXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tProofpointClusterID\n\t\tProofpointToken\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Websocket API Credentials/permissions"", ""description"": ""**ProofpointClusterID**, **ProofpointToken** is required. 
[See the documentation to learn more about API](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security/Data%20Connectors/ProofpointPOD_API_FunctionApp.json","true" -"message_CL","Proofpoint On demand(POD) Email Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security","proofpointinc1600438591120","azure-sentinel-proofpointpod","2021-03-31","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointPOD","Proofpoint","[Deprecated] Proofpoint On Demand Email Security","Proofpoint On Demand Email Security data connector provides the capability to get Proofpoint on Demand Email Protection data, allows users to check message traceability, monitoring into email activity, threats,and data exfiltration by attackers and malicious insiders. The connector provides ability to review events in your org on an accelerated basis, get event log files in hourly increments for recent activity.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Proofpoint Websocket API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-proofpointpod-parser) to create the Kusto functions alias, **ProofpointPOD**""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Proofpoint Websocket API**\n\n1. Proofpoint Websocket API service requires Remote Syslog Forwarding license. Please refer the [documentation](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API) on how to enable and check PoD Log API. \n2. 
You must provide your cluster id and security token.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Proofpoint On Demand Email Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Proofpoint POD Log API credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Proofpoint On Demand Email Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-proofpointpod-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ProofpointClusterID**, **ProofpointToken** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Proofpoint On Demand Email Security data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> NOTE: You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-proofpointpod-functionapp) file. 
Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ProofpointXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tProofpointClusterID\n\t\tProofpointToken\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Websocket API Credentials/permissions"", ""description"": ""**ProofpointClusterID**, **ProofpointToken** is required. 
[See the documentation to learn more about API](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security/Data%20Connectors/ProofpointPOD_API_FunctionApp.json","true" -"","Pulse Connect Secure","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Pulse%20Connect%20Secure","azuresentinel","azure-sentinel-solution-pulseconnectsecure","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"Syslog","Pulse Connect Secure","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Pulse%20Connect%20Secure","azuresentinel","azure-sentinel-solution-pulseconnectsecure","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PulseConnectSecure","Pulse Secure","[Deprecated] Pulse Connect Secure","The [Pulse Connect Secure](https://www.pulsesecure.net/products/pulse-connect-secure/) connector allows you to easily connect your Pulse Connect Secure logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigations. Integrating Pulse Connect Secure with Microsoft Sentinel provides more insight into your organization's network and improves your security operation capabilities.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Pulse Connect Secure and load the function code or click [here](https://aka.ms/sentinel-PulseConnectSecure-parser), on the second line of the query, enter the hostname(s) of your Pulse Connect Secure device(s) and any other unique identifiers for the logstream. 
The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n 2. Select **Apply below configuration to my machines** and select the facilities and severities.\n 3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Configure and connect the Pulse Connect Secure"", ""description"": ""[Follow the instructions](https://help.ivanti.com/ps/help/en_US/PPS/9.1R13/ag/configuring_an_external_syslog_server.htm) to enable syslog streaming of Pulse Connect Secure logs. 
Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Pulse Connect Secure"", ""description"": ""must be configured to export logs via Syslog""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Pulse%20Connect%20Secure/Data%20Connectors/Connector_Syslog_PulseConnectSecure.json","true" -"","Pure Storage","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Pure%20Storage","purestoragemarketplaceadmin","microsoft-sentinel-solution-purestorage","2024-02-05","","","purestoragemarketplaceadmin","Partner","https://support.purestorage.com","","domains","","","","","","","false","","false" -"","Qualys VM Knowledgebase","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Qualys%20VM%20Knowledgebase","azuresentinel","azure-sentinel-solution-qualysvmknowledgebase","2022-05-17","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"QualysKB_CL","Qualys VM Knowledgebase","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Qualys%20VM%20Knowledgebase","azuresentinel","azure-sentinel-solution-qualysvmknowledgebase","2022-05-17","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","QualysKB","Qualys","Qualys VM KnowledgeBase","The [Qualys Vulnerability Management (VM)](https://www.qualys.com/apps/vulnerability-management/) KnowledgeBase (KB) connector provides the capability to ingest the latest vulnerability data from the Qualys KB into Microsoft Sentinel.

This data can used to correlate and enrich vulnerability detections found by the [Qualys Vulnerability Management (VM)](https://docs.microsoft.com/azure/sentinel/connect-qualys-vm) data connector.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias QualysVM Knowledgebase and load the function code or click [here](https://aka.ms/sentinel-crowdstrikefalconendpointprotection-parser), on the second line of the query, enter the hostname(s) of your QualysVM Knowledgebase device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected. [Follow the steps](https://aka.ms/sentinel-qualyskb-parser) to use the Kusto function alias, **QualysKB**""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Qualys API**\n\n1. Log into the Qualys Vulnerability Management console with an administrator account, select the **Users** tab and the **Users** subtab. \n2. Click on the **New** drop-down menu and select **Users**.\n3. Create a username and password for the API account. \n4. In the **User Roles** tab, ensure the account role is set to **Manager** and access is allowed to **GUI** and **API**\n4. 
Log out of the administrator account and log into the console with the new API credentials for validation, then log out of the API account. \n5. Log back into the console using an administrator account and modify the API accounts User Roles, removing access to **GUI**. \n6. Save all changes.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Qualys KB connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Qualys API username and password, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Qualys KB connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-qualyskb-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-qualyskb-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password** , update the **URI**, and any additional URI **Filter Parameters** (This value should include a \""&\"" symbol between each parameter and should not include any spaces) \n> - Enter the URI that corresponds to your region. 
The complete list of API Server URLs can be [found here](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf#G4.735348)\n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n - Note: If deployment failed due to the storage account name being taken, change the **Function Name** to a unique value and redeploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""This method provides the step-by-step instructions to deploy the Qualys KB connector manually with Azure Function."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentinel-qualyskb-functioncode) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following seven (7) application settings individually, with their respective string values (case-sensitive): \n\t\tapiUsername\n\t\tapiPassword\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\tfilterParameters\n\t\tlogAnalyticsUri (optional)\n> - Enter the URI that corresponds to your region. The complete list of API Server URLs can be [found here](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf#G4.735348). The `uri` value must follow the following schema: `https:///api/2.0` \n> - Add any additional filter parameters, for the `filterParameters` variable, that need to be appended to the URI. The `filterParameter` value should include a \""&\"" symbol between each parameter and should not include any spaces.\n> - Note: If using Azure Key Vault, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n - Use logAnalyticsUri to override the log analytics API endpoint for delegated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Qualys API Key"", ""description"": ""A Qualys VM API username and password is required. [See the documentation to learn more about Qualys VM API](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Qualys%20VM%20Knowledgebase/Data%20Connectors/QualysKB_API_FunctionApp.json","true" -"","QualysVM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/QualysVM","azuresentinel","azure-sentinel-qualysvm","2020-12-14","2025-11-18","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"QualysHostDetectionV3_CL","QualysVM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/QualysVM","azuresentinel","azure-sentinel-qualysvm","2020-12-14","2025-11-18","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","QualysVMLogsCCPDefinition","Microsoft","Qualys Vulnerability Management (via Codeless Connector Framework)","The [Qualys Vulnerability Management (VM)](https://www.qualys.com/apps/vulnerability-management/) data connector provides the capability to ingest vulnerability host detection data into Microsoft Sentinel through the Qualys API. 
The connector provides visibility into host detection data from vulnerability scans.","[{""title"": ""Connect Qualys Vulnerability Management to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** To gather data for Detections based on Host, expand the **DetectionList** column in the table.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Qualys VM, you need to provide the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. API Credentials \n To gather data from Qualys VM, you'll need Qualys API credentials, including your Username and Password.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. API Server URL \n To gather data from Qualys VM, you'll need the Qualys API server URL specific to your region. You can find the exact API server URL for your region [here](https://www.qualys.com/platform-identification/#api-urls)""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Qualys API User Name"", ""placeholder"": ""Enter UserName"", ""type"": ""text"", ""name"": ""username"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Qualys API Password"", ""placeholder"": ""Enter password"", ""type"": ""password"", ""name"": ""password"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Qualys API Server URL"", ""placeholder"": ""Enter API Server URL"", ""type"": ""text"", ""name"": ""apiServerUrl"", ""required"": true, ""description"": ""Ensure the API Server URL starts with https:// and paste the whole API Server URL without / at the ending""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. Truncation Limit \n Configure the maximum number of host records to retrieve per API call (20-5000 range). 
Higher values may improve performance but could impact API response times.""}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Truncation Limit"", ""name"": ""truncationLimit"", ""options"": [{""key"": ""1000"", ""text"": ""1000 - API default value""}, {""key"": ""20"", ""text"": ""20 - Minimal load, slower collection""}, {""key"": ""100"", ""text"": ""100 - Low load""}, {""key"": ""500"", ""text"": ""500 - Moderate load""}, {""key"": ""2500"", ""text"": ""2500 - High load, faster collection""}, {""key"": ""5000"", ""text"": ""5000 - Maximum load, fastest collection""}], ""placeholder"": ""Select truncation limit"", ""isMultiSelect"": false, ""required"": true}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}], ""customs"": [{""name"": ""API access and roles"", ""description"": ""Ensure the Qualys VM user has a role of Reader or higher. If the role is Reader, ensure that API access is enabled for the account. Auditor role is not supported to access the API. 
For more details, refer to the Qualys VM [Host Detection API](https://docs.qualys.com/en/vm/qweb-all-api/mergedProjects/qapi-assets/host_lists/host_detection.htm#v_3_0) and [User role Comparison](https://qualysguard.qualys.com/qwebhelp/fo_portal/user_accounts/user_roles_comparison_vm.htm) document.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/QualysVM/Data%20Connectors/QualysVMHostLogs_ccp/QualysVMHostLogs_ConnectorDefinition.json","true" -"QualysHostDetectionV2_CL","QualysVM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/QualysVM","azuresentinel","azure-sentinel-qualysvm","2020-12-14","2025-11-18","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","QualysVulnerabilityManagement","Qualys","[DEPRECATED] Qualys Vulnerability Management","The [Qualys Vulnerability Management (VM)](https://www.qualys.com/apps/vulnerability-management/) data connector provides the capability to ingest vulnerability host detection data into Microsoft Sentinel through the Qualys API. The connector provides visibility into host detection data from vulnerability scans. This connector provides Microsoft Sentinel the capability to view dashboards, create custom alerts, and improve investigation

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Qualys VM to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Qualys VM API**\n\n1. Log into the Qualys Vulnerability Management console with an administrator account, select the **Users** tab and the **Users** subtab. \n2. Click on the **New** drop-down menu and select **Users..**\n3. Create a username and password for the API account. \n4. In the **User Roles** tab, ensure the account role is set to **Manager** and access is allowed to **GUI** and **API**\n4. Log out of the administrator account and log into the console with the new API credentials for validation, then log out of the API account. \n5. Log back into the console using an administrator account and modify the API accounts User Roles, removing access to **GUI**. \n6. 
Save all changes.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Qualys VM connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Qualys VM API Authorization Key(s), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated, if you have previously deployed an earlier version, and want to update, please delete the existing Qualys VM Azure Function before redeploying this version. Please use Qualys V2 version Workbook, detections. ""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Qualys VM connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-QualysVM-azuredeployV2) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-QualysVM-azuredeployV2-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password** , update the **URI**, and any additional URI **Filter Parameters** (each filter should be separated by an \""&\"" symbol, no spaces.) \n> - Enter the URI that corresponds to your region. 
The complete list of API Server URLs can be [found here](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf#G4.735348) -- There is no need to add a time suffix to the URI, the Function App will dynamically append the Time Value to the URI in the proper format. \n - The default **Time Interval** is set to pull the last five (5) minutes of data. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. \n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Quayls VM connector manually with Azure Functions.""}, {""title"": """", ""description"": ""**1. Create a Function App**\n\n1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp), and select **+ Add**.\n2. In the **Basics** tab, ensure Runtime stack is set to **Powershell Core**. \n3. In the **Hosting** tab, ensure the **Consumption (Serverless)** plan type is selected.\n4. Make other preferrable configuration changes, if needed, then click **Create**.""}, {""title"": """", ""description"": ""**2. Import Function App Code**\n\n1. In the newly created Function App, select **Functions** on the left pane and click **+ New Function**.\n2. Select **Timer Trigger**.\n3. 
Enter a unique Function **Name** and leave the default cron schedule of every 5 minutes, then click **Create**.\n5. Click on **Code + Test** on the left pane. \n6. Copy the [Function App Code](https://aka.ms/sentinel-QualysVM-functioncodeV2) and paste into the Function App `run.ps1` editor.\n7. Click **Save**.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following eight (8) application settings individually, with their respective string values (case-sensitive): \n\t\tapiUsername\n\t\tapiPassword\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\tfilterParameters\n\t\ttimeInterval\n\t\tlogAnalyticsUri (optional)\n> - Enter the URI that corresponds to your region. The complete list of API Server URLs can be [found here](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf#G4.735348). The `uri` value must follow the following schema: `https:///api/2.0/fo/asset/host/vm/detection/?action=list&vm_processed_after=` -- There is no need to add a time suffix to the URI, the Function App will dynamically append the Time Value to the URI in the proper format.\n> - Add any additional filter parameters, for the `filterParameters` variable, that need to be appended to the URI. Each parameter should be seperated by an \""&\"" symbol and should not include any spaces.\n> - Set the `timeInterval` (in minutes) to the value of `5` to correspond to the Timer Trigger of every `5` minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly to prevent overlapping data ingestion.\n> - Note: If using Azure Key Vault, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}, {""title"": """", ""description"": ""**4. Configure the host.json**.\n\nDue to the potentially large amount of Qualys host detection data being ingested, it can cause the execution time to surpass the default Function App timeout of five (5) minutes. Increase the default timeout duration to the maximum of ten (10) minutes, under the Consumption Plan, to allow more time for the Function App to execute.\n\n1. In the Function App, select the Function App Name and select the **App Service Editor** blade.\n2. Click **Go** to open the editor, then select the **host.json** file under the **wwwroot** directory.\n3. Add the line `\""functionTimeout\"": \""00:10:00\"",` above the `managedDependancy` line \n4. Ensure **SAVED** appears on the top right corner of the editor, then exit the editor.\n\n> NOTE: If a longer timeout duration is required, consider upgrading to an [App Service Plan](https://docs.microsoft.com/azure/azure-functions/functions-scale#timeout)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Qualys API Key"", ""description"": ""A Qualys VM API username and password is required. [See the documentation to learn more about Qualys VM API](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/QualysVM/Data%20Connectors/QualysVM_API_FunctionApp.json","true" -"QualysHostDetection_CL","QualysVM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/QualysVM","azuresentinel","azure-sentinel-qualysvm","2020-12-14","2025-11-18","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","QualysVulnerabilityManagement","Qualys","[DEPRECATED] Qualys Vulnerability Management","The [Qualys Vulnerability Management (VM)](https://www.qualys.com/apps/vulnerability-management/) data connector provides the capability to ingest vulnerability host detection data into Microsoft Sentinel through the Qualys API. The connector provides visibility into host detection data from vulnerability scans. This connector provides Microsoft Sentinel the capability to view dashboards, create custom alerts, and improve investigation

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Qualys VM to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Qualys VM API**\n\n1. Log into the Qualys Vulnerability Management console with an administrator account, select the **Users** tab and the **Users** subtab. \n2. Click on the **New** drop-down menu and select **Users..**\n3. Create a username and password for the API account. \n4. In the **User Roles** tab, ensure the account role is set to **Manager** and access is allowed to **GUI** and **API**\n4. Log out of the administrator account and log into the console with the new API credentials for validation, then log out of the API account. \n5. Log back into the console using an administrator account and modify the API accounts User Roles, removing access to **GUI**. \n6. 
Save all changes.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Qualys VM connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Qualys VM API Authorization Key(s), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated, if you have previously deployed an earlier version, and want to update, please delete the existing Qualys VM Azure Function before redeploying this version. Please use Qualys V2 version Workbook, detections. ""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Qualys VM connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-QualysVM-azuredeployV2) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-QualysVM-azuredeployV2-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password** , update the **URI**, and any additional URI **Filter Parameters** (each filter should be separated by an \""&\"" symbol, no spaces.) \n> - Enter the URI that corresponds to your region. 
The complete list of API Server URLs can be [found here](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf#G4.735348) -- There is no need to add a time suffix to the URI, the Function App will dynamically append the Time Value to the URI in the proper format. \n - The default **Time Interval** is set to pull the last five (5) minutes of data. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. \n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Quayls VM connector manually with Azure Functions.""}, {""title"": """", ""description"": ""**1. Create a Function App**\n\n1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp), and select **+ Add**.\n2. In the **Basics** tab, ensure Runtime stack is set to **Powershell Core**. \n3. In the **Hosting** tab, ensure the **Consumption (Serverless)** plan type is selected.\n4. Make other preferrable configuration changes, if needed, then click **Create**.""}, {""title"": """", ""description"": ""**2. Import Function App Code**\n\n1. In the newly created Function App, select **Functions** on the left pane and click **+ New Function**.\n2. Select **Timer Trigger**.\n3. 
Enter a unique Function **Name** and leave the default cron schedule of every 5 minutes, then click **Create**.\n5. Click on **Code + Test** on the left pane. \n6. Copy the [Function App Code](https://aka.ms/sentinel-QualysVM-functioncodeV2) and paste into the Function App `run.ps1` editor.\n7. Click **Save**.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following eight (8) application settings individually, with their respective string values (case-sensitive): \n\t\tapiUsername\n\t\tapiPassword\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\tfilterParameters\n\t\ttimeInterval\n\t\tlogAnalyticsUri (optional)\n> - Enter the URI that corresponds to your region. The complete list of API Server URLs can be [found here](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf#G4.735348). The `uri` value must follow the following schema: `https:///api/2.0/fo/asset/host/vm/detection/?action=list&vm_processed_after=` -- There is no need to add a time suffix to the URI, the Function App will dynamically append the Time Value to the URI in the proper format.\n> - Add any additional filter parameters, for the `filterParameters` variable, that need to be appended to the URI. Each parameter should be seperated by an \""&\"" symbol and should not include any spaces.\n> - Set the `timeInterval` (in minutes) to the value of `5` to correspond to the Timer Trigger of every `5` minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly to prevent overlapping data ingestion.\n> - Note: If using Azure Key Vault, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}, {""title"": """", ""description"": ""**4. Configure the host.json**.\n\nDue to the potentially large amount of Qualys host detection data being ingested, it can cause the execution time to surpass the default Function App timeout of five (5) minutes. Increase the default timeout duration to the maximum of ten (10) minutes, under the Consumption Plan, to allow more time for the Function App to execute.\n\n1. In the Function App, select the Function App Name and select the **App Service Editor** blade.\n2. Click **Go** to open the editor, then select the **host.json** file under the **wwwroot** directory.\n3. Add the line `\""functionTimeout\"": \""00:10:00\"",` above the `managedDependency` line \n4. Ensure **SAVED** appears on the top right corner of the editor, then exit the editor.\n\n> NOTE: If a longer timeout duration is required, consider upgrading to an [App Service Plan](https://docs.microsoft.com/azure/azure-functions/functions-scale#timeout)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Qualys API Key"", ""description"": ""A Qualys VM API username and password is required. [See the documentation to learn more about Qualys VM API](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/QualysVM/Data%20Connectors/QualysVM_API_FunctionApp.json","true" -"","Quokka","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Quokka","quokka","azure-sentinel-solution-quokka","2025-10-30","","","Quokka","Partner","https://www.quokka.io/contact-us#customer-support","","domains","","","","","","","false","","false" -"QscoutAppEvents_CL","Quokka","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Quokka","quokka","azure-sentinel-solution-quokka","2025-10-30","","","Quokka","Partner","https://www.quokka.io/contact-us#customer-support","","domains","QscoutAppEventsCCFDefinition","Quokka","QscoutAppEventsConnector","Ingest Qscout application events into Microsoft Sentinel","[{""description"": "">**NOTE:** This connector uses Codeless Connector Framework (CCF) to connect to the Qscout app events feed and ingest data into Microsoft Sentinel""}, {""description"": ""Provide the required values below:\n"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Qscout Organization ID"", ""placeholder"": ""123456"", ""type"": ""text"", ""name"": ""organizationId""}}, {""type"": ""Textbox"", 
""parameters"": {""label"": ""Qscout Organization API Key"", ""placeholder"": ""abcdxyz"", ""type"": ""text"", ""name"": ""apiKey""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required"", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true, ""read"": true}}], ""customs"": [{""name"": ""Qscout organization id"", ""description"": ""The API requires your organization ID in Qscout.""}, {""name"": ""Qscout organization API key"", ""description"": ""The API requires your organization API key in Qscout.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Quokka/Data%20Connectors/QuokkaQscoutAppEventsLogs_ccf/QuokkaQscoutAppEventsLogs_connectorDefinition.json","true" -"","RSA SecurID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RSA%20SecurID","azuresentinel","azure-sentinel-solution-securid","2021-09-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"Syslog","RSA SecurID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RSA%20SecurID","azuresentinel","azure-sentinel-solution-securid","2021-09-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","RSASecurIDAM","RSA","[Deprecated] RSA® SecurID (Authentication Manager)","The [RSA® SecurID Authentication Manager](https://www.securid.com/) data connector provides the capability to ingest [RSA® SecurID Authentication Manager events](https://community.rsa.com/t5/rsa-authentication-manager/rsa-authentication-manager-log-messages/ta-p/630160) into Microsoft Sentinel. 
Refer to [RSA® SecurID Authentication Manager documentation](https://community.rsa.com/t5/rsa-authentication-manager/getting-started-with-rsa-authentication-manager/ta-p/569582) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**RSASecurIDAMEvent**](https://aka.ms/sentinel-rsasecuridam-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using RSA SecurID Authentication Manager version: 8.4 and 8.5"", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server where the RSA\u00ae SecurID Authentication Manager logs are forwarded.\n\n> Logs from RSA\u00ae SecurID Authentication Manager Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", 
""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure RSA\u00ae SecurID Authentication Manager event forwarding"", ""description"": ""Follow the configuration steps below to get RSA\u00ae SecurID Authentication Manager logs into Microsoft Sentinel.\n1. [Follow these instructions](https://community.rsa.com/t5/rsa-authentication-manager/configure-the-remote-syslog-host-for-real-time-log-monitoring/ta-p/571374) to forward alerts from the Manager to a syslog server.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RSA%20SecurID/Data%20Connectors/RSASecurID.json","true" -"","RSAIDPlus_AdminLogs_Connector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RSAIDPlus_AdminLogs_Connector","rsasecurity1687281258544","azure-sentinel-solution-rsa_id_plus_admin_log","2025-10-14","","","RSA Support Team","Partner","https://community.rsa.com/","","domains,verticals","","","","","","","false","","false" -"RSAIDPlus_AdminLogs_CL","RSAIDPlus_AdminLogs_Connector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RSAIDPlus_AdminLogs_Connector","rsasecurity1687281258544","azure-sentinel-solution-rsa_id_plus_admin_log","2025-10-14","","","RSA Support 
Team","Partner","https://community.rsa.com/","","domains,verticals","RSAIDPlus_AdmingLogs_Connector","RSA","RSA ID Plus Admin Logs Connector","The RSA ID Plus AdminLogs Connector provides the capability to ingest [Cloud Admin Console Audit Events](https://community.rsa.com/s/article/Cloud-Administration-Event-Log-API-5d22ba17) into Microsoft Sentinel using Cloud Admin APIs.","[{""description"": "">**NOTE:** This connector uses Codeless Connector Framework (CCF) to connect to the RSA ID Plus Cloud Admin APIs to pull logs into Microsoft Sentinel.""}, {""title"": ""**STEP 1** - Create Legacy Admin API Client in Cloud Admin Console."", ""description"": ""Follow steps mentioned in this [page](https://community.rsa.com/s/article/Manage-Legacy-Clients-API-Keys-a89c9cbc#).""}, {""title"": ""**STEP 2** - Generate the Base64URL encoded JWT Token."", ""description"": ""Follow the steps mentioned in this [page](https://community.rsa.com/s/article/Authentication-for-the-Cloud-Administration-APIs-a04e3fb9) under the header 'Legacy Administration API'.""}, {""title"": ""**STEP 3** - Configure the Cloud Admin API to start ingesting Admin event logs into Microsoft Sentinel."", ""description"": ""Provide the required values below:\n"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Admin API URL"", ""placeholder"": ""https://.access.securid.com/AdminInterface/restapi/v1/adminlog/exportLogs"", ""type"": ""text"", ""name"": ""Admin-API-URL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""JWT Token"", ""placeholder"": ""Enter your JWT Token"", ""type"": ""password"", ""name"": ""access_token""}}]}, {""title"": ""**STEP 4** - Click Connect"", ""description"": ""Verify all the fields above were filled in correctly. 
Press Connect to start the connector."", ""instructions"": [{""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""RSA ID Plus API Authentication"", ""description"": ""To access the Admin APIs, a valid Base64URL encoded JWT token, signed with the client's Legacy Administration API key is required.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RSAIDPlus_AdminLogs_Connector/Data%20Connectors/RSIDPlus_AdminLogs_Connector_CCP/RSAIDPlus_AdminLogs_ConnectorDefinition.json","true" -"","Radiflow","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Radiflow","radiflow","azure-sentinel-solution-radiflow","2024-06-26","","","Radiflow","Partner","https://www.radiflow.com","","domains","","","","","","","false","","false" -"CommonSecurityLog","Radiflow","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Radiflow","radiflow","azure-sentinel-solution-radiflow","2024-06-26","","","Radiflow","Partner","https://www.radiflow.com","","domains","RadiflowIsid","Radiflow","Radiflow iSID via AMA","iSID enables non-disruptive monitoring of distributed ICS networks for changes in topology and behavior, using multiple security packages, each offering a unique capability pertaining to a specific type of network activity","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**RadiflowEvent**] which is deployed with the Microsoft Sentinel Solution."", ""instructions"": [{""parameters"": {""title"": ""1. 
Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade.\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule).\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy._\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine."", ""instructions"": []}, {""title"": ""Step B. Configure iSID to send logs using CEF"", ""description"": ""Configure log forwarding using CEF:\n\n1. Navigate to the **System Notifications** section of the Configuration menu.\n\n2. Under Syslog, select **+Add**.\n\n3. In the **New Syslog Server** dialog specify the name, remote server **IP**, **Port**, **Transport** and select **Format** - **CEF**.\n\n4. Press **Apply** to exit the **Add Syslog dialog**."", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python --version\n\n2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Radiflow/Data%20Connectors/RadiflowIsid.json","true" -"","Rapid7InsightVM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Rapid7InsightVM","azuresentinel","azure-sentinel-solution-rapid7insightvm","2021-07-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"NexposeInsightVMCloud_assets_CL","Rapid7InsightVM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Rapid7InsightVM","azuresentinel","azure-sentinel-solution-rapid7insightvm","2021-07-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","InsightVMCloudAPI","Rapid7","Rapid7 Insight Platform Vulnerability Management Reports","The [Rapid7 Insight VM](https://www.rapid7.com/products/insightvm/) Report data connector provides the capability to ingest Scan reports and vulnerability data into Microsoft Sentinel through the REST API from the Rapid7 Insight platform (Managed in the cloud). Refer to [API documentation](https://docs.rapid7.com/insight/api-overview/) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Insight VM API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parsers based on a Kusto Function to work as expected [**InsightVMAssets**](https://aka.ms/sentinel-InsightVMAssets-parser) and [**InsightVMVulnerabilities**](https://aka.ms/sentinel-InsightVMVulnerabilities-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Insight VM Cloud**\n\n [Follow the instructions](https://docs.rapid7.com/insight/managing-platform-api-keys/) to obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Rapid7 Insight Vulnerability Management Report data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-InsightVMCloudAPI-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **InsightVMAPIKey**, choose **InsightVMCloudRegion** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Rapid7 Insight Vulnerability Management Report data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://github.com/averbn/azure_sentinel_data_connectors/raw/main/insight-vm-cloud-azure-sentinel-data-connector/InsightVMCloudAPISentinelConn.zip) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tInsightVMAPIKey\n\t\tInsightVMCloudRegion\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials"", ""description"": ""**InsightVMAPIKey** is required for REST API. [See the documentation to learn more about API](https://docs.rapid7.com/insight/api-overview/). 
Check all [requirements and follow the instructions](https://docs.rapid7.com/insight/managing-platform-api-keys/) for obtaining credentials""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Rapid7InsightVM/Data%20Connectors/InsightVMCloud_API_FunctionApp.json","true" -"NexposeInsightVMCloud_vulnerabilities_CL","Rapid7InsightVM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Rapid7InsightVM","azuresentinel","azure-sentinel-solution-rapid7insightvm","2021-07-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","InsightVMCloudAPI","Rapid7","Rapid7 Insight Platform Vulnerability Management Reports","The [Rapid7 Insight VM](https://www.rapid7.com/products/insightvm/) Report data connector provides the capability to ingest Scan reports and vulnerability data into Microsoft Sentinel through the REST API from the Rapid7 Insight platform (Managed in the cloud). Refer to [API documentation](https://docs.rapid7.com/insight/api-overview/) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Insight VM API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parsers based on a Kusto Function to work as expected [**InsightVMAssets**](https://aka.ms/sentinel-InsightVMAssets-parser) and [**InsightVMVulnerabilities**](https://aka.ms/sentinel-InsightVMVulnerabilities-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Insight VM Cloud**\n\n [Follow the instructions](https://docs.rapid7.com/insight/managing-platform-api-keys/) to obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Rapid7 Insight Vulnerability Management Report data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-InsightVMCloudAPI-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. 
Enter the **InsightVMAPIKey**, choose **InsightVMCloudRegion** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Rapid7 Insight Vulnerability Management Report data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://github.com/averbn/azure_sentinel_data_connectors/raw/main/insight-vm-cloud-azure-sentinel-data-connector/InsightVMCloudAPISentinelConn.zip) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tInsightVMAPIKey\n\t\tInsightVMCloudRegion\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials"", ""description"": ""**InsightVMAPIKey** is required for REST API. [See the documentation to learn more about API](https://docs.rapid7.com/insight/api-overview/). 
Check all [requirements and follow the instructions](https://docs.rapid7.com/insight/managing-platform-api-keys/) for obtaining credentials""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Rapid7InsightVM/Data%20Connectors/InsightVMCloud_API_FunctionApp.json","true" -"","Recorded Future","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Recorded%20Future","recordedfuture1605638642586","recorded_future_sentinel_solution","2021-11-01","2023-09-19","","Recorded Future Support Team","Partner","http://support.recordedfuture.com/","","domains","","","","","","","false","","false" -"","Recorded Future Identity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Recorded%20Future%20Identity","recordedfuture1605638642586","recorded_future_identity_solution","2022-09-06","2025-04-02","","Recorded Future Support Team","Partner","https://support.recordedfuture.com/","","domains","","","","","","","false","","false" -"","Red Canary","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Red%20Canary","Red Canary","microsoft-sentinel-solution-RedCanary","2022-03-04","2022-03-04","","Red Canary","Partner","https://www.redcanary.com","","domains","","","","","","","false","","false" -"RedCanaryDetections_CL","Red Canary","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Red%20Canary","Red Canary","microsoft-sentinel-solution-RedCanary","2022-03-04","2022-03-04","","Red Canary","Partner","https://www.redcanary.com","","domains","RedCanaryDataConnector","Red Canary","Red Canary Threat Detection","The Red Canary data connector provides the capability to ingest published Detections into Microsoft Sentinel using the Data Collector REST API.","[{""title"": """", ""description"": ""Create an Automate Playbook and Trigger as detailed in [this article](https://help.redcanary.com/hc/en-us/articles/4410957523479-Azure-Sentinel). 
You can skip the **Add analysis rule to Microsoft Sentinel** section; this data connector allows you to import the analysis rule directly into your workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Red%20Canary/Data%20Connectors/RedCanaryDataConnector.json","true" -"","ReversingLabs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ReversingLabs","reversinglabs1597673283347","rl_offer_content_hub_aoae","2022-08-08","2024-07-17","","ReversingLabs","Partner","https://support.reversinglabs.com/hc/en-us","","domains","","","","","","","false","","false" -"","RidgeSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RidgeSecurity","ridgesecuritytechnologyinc1670890478389","microsoft-sentinel-solution-ridgesecurity","2023-10-23","2023-10-23","","RidgeSecurity","Partner","https://ridgesecurity.ai/about-us/","","domains","","","","","","","false","","false" 
-"CommonSecurityLog","RidgeSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RidgeSecurity","ridgesecuritytechnologyinc1670890478389","microsoft-sentinel-solution-ridgesecurity","2023-10-23","2023-10-23","","RidgeSecurity","Partner","https://ridgesecurity.ai/about-us/","","domains","RidgeBotDataConnector","RidgeSecurity","[Deprecated] RIDGEBOT - data connector for Microsoft Sentinel","The RidgeBot connector lets users connect RidgeBot with Microsoft Sentinel, allowing creation of Dashboards, Workbooks, Notebooks and Alerts.","[{""title"": """", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine""}, {""title"": ""Step B. 
Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Configure the RidgeBot to forward events to syslog server as described here: https://portal.ridgesecurity.ai/downloadurl/89x72912. Generate some attack events for your application.""}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python --version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RidgeSecurity/Data%20Connectors/RidgeSecurity.json","true" -"","RiskIQ","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RiskIQ","azuresentinel","azure-sentinel-solution-riskiq","2021-10-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","RubrikSecurityCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RubrikSecurityCloud","rubrik_inc","rubrik_sentinel","2022-07-19","2025-07-25","","Rubrik","Partner","https://support.rubrik.com","","domains","","","","","","","false","","false" -"Rubrik_Anomaly_Data_CL","RubrikSecurityCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RubrikSecurityCloud","rubrik_inc","rubrik_sentinel","2022-07-19","2025-07-25","","Rubrik","Partner","https://support.rubrik.com","","domains","RubrikSecurityCloudAzureFunctions","Rubrik, Inc","Rubrik Security Cloud data connector","The Rubrik Security Cloud data connector enables security operations teams to integrate insights from Rubrik's Data Observability services into Microsoft Sentinel. 
The insights include identification of anomalous filesystem behavior associated with ransomware and mass deletion, assess the blast radius of a ransomware attack, and sensitive data operators to prioritize and more rapidly investigate potential incidents.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Rubrik webhook which push its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Rubrik Microsoft Sentinel data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Rubrik connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-RubrikWebhookEvents-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tAnomaliesTableName \n\t\tRansomwareAnalysisTableName \n\t\tThreatHuntsTableName \n\t\tEventsTableName \n\t\tLogLevel \n \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Rubrik Microsoft Sentinel data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-RubrikWebhookEvents-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. RubrikXXXXX).\n\n\te. 
**Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tAnomaliesTableName\n\t\tRansomwareAnalysisTableName\n\t\tThreatHuntsTableName\n\t\tEventsTableName\n\t\tLogLevel\n\t\tlogAnalyticsUri (optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: https://.ods.opinsights.azure.us. \n4. Once all application settings have been entered, click **Save**.""}, {""title"": """", ""description"": ""**Post Deployment steps**\n\n""}, {""title"": ""1) Get the Function app endpoint"", ""description"": ""1. Go to Azure function Overview page and Click on **\""Functions\""** tab.\n2. Click on the function called **\""RubrikHttpStarter\""**.\n3. Go to **\""GetFunctionurl\""** and copy the function url.""}, {""title"": ""2) Add a webhook in RubrikSecurityCloud to send data to Microsoft Sentinel."", ""description"": ""Follow the Rubrik User Guide instructions to [Add a Webhook](https://docs.rubrik.com/en-us/saas/saas/common/adding_webhook.html) to begin receiving event information \n 1. Select the Microsoft Sentinel as the webhook Provider \n 2. 
Enter the desired Webhook name \n 3. Enter the URL part from copied Function-url as the webhook URL endpoint and replace **{functionname}** with **\""RubrikAnomalyOrchestrator\""**, for the Rubrik Microsoft Sentinel Solution \n 4. Select the EventType as Anomaly \n 5. Select the following severity levels: Critical, Warning, Informational \n 6. Choose multiple log types, if desired, when running **\""RubrikEventsOrchestrator\""** \n 7. Repeat the same steps to add webhooks for Anomaly Detection Analysis, Threat Hunt and Other Events.\n \n\n NOTE: while adding webhooks for Anomaly Detection Analysis, Threat Hunt and Other Events, replace **{functionname}** with **\""RubrikRansomwareOrchestrator\""**, **\""RubrikThreatHuntOrchestrator\""** and **\""RubrikEventsOrchestrator\""** respectively in copied function-url.""}, {""title"": """", ""description"": ""*Now we are done with the rubrik Webhook configuration. Once the webhook events triggered , you should be able to see the Anomaly, Anomaly Detection Analysis, Threat Hunt events and Other Events from the Rubrik into respective LogAnalytics workspace table called \""Rubrik_Anomaly_Data_CL\"", \""Rubrik_Ransomware_Data_CL\"", \""Rubrik_ThreatHunt_Data_CL\"", and \""Rubrik_Events_Data_CL\"".*\n\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RubrikSecurityCloud/Data%20Connectors/RubrikWebhookEvents/RubrikWebhookEvents_FunctionApp.json","true" -"Rubrik_Events_Data_CL","RubrikSecurityCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RubrikSecurityCloud","rubrik_inc","rubrik_sentinel","2022-07-19","2025-07-25","","Rubrik","Partner","https://support.rubrik.com","","domains","RubrikSecurityCloudAzureFunctions","Rubrik, Inc","Rubrik Security Cloud data connector","The Rubrik Security Cloud data connector enables security operations teams to integrate insights from Rubrik's Data Observability services into Microsoft Sentinel. The insights include identification of anomalous filesystem behavior associated with ransomware and mass deletion, assess the blast radius of a ransomware attack, and sensitive data operators to prioritize and more rapidly investigate potential incidents.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Rubrik webhook which push its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. 
Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Rubrik Microsoft Sentinel data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Rubrik connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-RubrikWebhookEvents-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tAnomaliesTableName \n\t\tRansomwareAnalysisTableName \n\t\tThreatHuntsTableName \n\t\tEventsTableName \n\t\tLogLevel \n \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Rubrik Microsoft Sentinel data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. 
Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-RubrikWebhookEvents-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. RubrikXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. 
In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tAnomaliesTableName\n\t\tRansomwareAnalysisTableName\n\t\tThreatHuntsTableName\n\t\tEventsTableName\n\t\tLogLevel\n\t\tlogAnalyticsUri (optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: https://.ods.opinsights.azure.us. \n4. Once all application settings have been entered, click **Save**.""}, {""title"": """", ""description"": ""**Post Deployment steps**\n\n""}, {""title"": ""1) Get the Function app endpoint"", ""description"": ""1. Go to Azure function Overview page and Click on **\""Functions\""** tab.\n2. Click on the function called **\""RubrikHttpStarter\""**.\n3. Go to **\""GetFunctionurl\""** and copy the function url.""}, {""title"": ""2) Add a webhook in RubrikSecurityCloud to send data to Microsoft Sentinel."", ""description"": ""Follow the Rubrik User Guide instructions to [Add a Webhook](https://docs.rubrik.com/en-us/saas/saas/common/adding_webhook.html) to begin receiving event information \n 1. Select the Microsoft Sentinel as the webhook Provider \n 2. Enter the desired Webhook name \n 3. Enter the URL part from copied Function-url as the webhook URL endpoint and replace **{functionname}** with **\""RubrikAnomalyOrchestrator\""**, for the Rubrik Microsoft Sentinel Solution \n 4. Select the EventType as Anomaly \n 5. Select the following severity levels: Critical, Warning, Informational \n 6. Choose multiple log types, if desired, when running **\""RubrikEventsOrchestrator\""** \n 7. 
Repeat the same steps to add webhooks for Anomaly Detection Analysis, Threat Hunt and Other Events.\n \n\n NOTE: while adding webhooks for Anomaly Detection Analysis, Threat Hunt and Other Events, replace **{functionname}** with **\""RubrikRansomwareOrchestrator\""**, **\""RubrikThreatHuntOrchestrator\""** and **\""RubrikEventsOrchestrator\""** respectively in copied function-url.""}, {""title"": """", ""description"": ""*Now we are done with the rubrik Webhook configuration. Once the webhook events triggered , you should be able to see the Anomaly, Anomaly Detection Analysis, Threat Hunt events and Other Events from the Rubrik into respective LogAnalytics workspace table called \""Rubrik_Anomaly_Data_CL\"", \""Rubrik_Ransomware_Data_CL\"", \""Rubrik_ThreatHunt_Data_CL\"", and \""Rubrik_Events_Data_CL\"".*\n\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RubrikSecurityCloud/Data%20Connectors/RubrikWebhookEvents/RubrikWebhookEvents_FunctionApp.json","true" -"Rubrik_Ransomware_Data_CL","RubrikSecurityCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RubrikSecurityCloud","rubrik_inc","rubrik_sentinel","2022-07-19","2025-07-25","","Rubrik","Partner","https://support.rubrik.com","","domains","RubrikSecurityCloudAzureFunctions","Rubrik, Inc","Rubrik Security Cloud data connector","The Rubrik Security Cloud data connector enables security operations teams to integrate insights from Rubrik's Data Observability services into Microsoft Sentinel. The insights include identification of anomalous filesystem behavior associated with ransomware and mass deletion, assess the blast radius of a ransomware attack, and sensitive data operators to prioritize and more rapidly investigate potential incidents.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Rubrik webhook which push its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Rubrik Microsoft Sentinel data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Rubrik connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-RubrikWebhookEvents-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tAnomaliesTableName \n\t\tRansomwareAnalysisTableName \n\t\tThreatHuntsTableName \n\t\tEventsTableName \n\t\tLogLevel \n \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Rubrik Microsoft Sentinel data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. 
Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-RubrikWebhookEvents-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. RubrikXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. 
In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tAnomaliesTableName\n\t\tRansomwareAnalysisTableName\n\t\tThreatHuntsTableName\n\t\tEventsTableName\n\t\tLogLevel\n\t\tlogAnalyticsUri (optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: https://.ods.opinsights.azure.us. \n4. Once all application settings have been entered, click **Save**.""}, {""title"": """", ""description"": ""**Post Deployment steps**\n\n""}, {""title"": ""1) Get the Function app endpoint"", ""description"": ""1. Go to Azure function Overview page and Click on **\""Functions\""** tab.\n2. Click on the function called **\""RubrikHttpStarter\""**.\n3. Go to **\""GetFunctionurl\""** and copy the function url.""}, {""title"": ""2) Add a webhook in RubrikSecurityCloud to send data to Microsoft Sentinel."", ""description"": ""Follow the Rubrik User Guide instructions to [Add a Webhook](https://docs.rubrik.com/en-us/saas/saas/common/adding_webhook.html) to begin receiving event information \n 1. Select the Microsoft Sentinel as the webhook Provider \n 2. Enter the desired Webhook name \n 3. Enter the URL part from copied Function-url as the webhook URL endpoint and replace **{functionname}** with **\""RubrikAnomalyOrchestrator\""**, for the Rubrik Microsoft Sentinel Solution \n 4. Select the EventType as Anomaly \n 5. Select the following severity levels: Critical, Warning, Informational \n 6. Choose multiple log types, if desired, when running **\""RubrikEventsOrchestrator\""** \n 7. 
Repeat the same steps to add webhooks for Anomaly Detection Analysis, Threat Hunt and Other Events.\n \n\n NOTE: while adding webhooks for Anomaly Detection Analysis, Threat Hunt and Other Events, replace **{functionname}** with **\""RubrikRansomwareOrchestrator\""**, **\""RubrikThreatHuntOrchestrator\""** and **\""RubrikEventsOrchestrator\""** respectively in copied function-url.""}, {""title"": """", ""description"": ""*Now we are done with the rubrik Webhook configuration. Once the webhook events triggered , you should be able to see the Anomaly, Anomaly Detection Analysis, Threat Hunt events and Other Events from the Rubrik into respective LogAnalytics workspace table called \""Rubrik_Anomaly_Data_CL\"", \""Rubrik_Ransomware_Data_CL\"", \""Rubrik_ThreatHunt_Data_CL\"", and \""Rubrik_Events_Data_CL\"".*\n\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RubrikSecurityCloud/Data%20Connectors/RubrikWebhookEvents/RubrikWebhookEvents_FunctionApp.json","true" -"Rubrik_ThreatHunt_Data_CL","RubrikSecurityCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RubrikSecurityCloud","rubrik_inc","rubrik_sentinel","2022-07-19","2025-07-25","","Rubrik","Partner","https://support.rubrik.com","","domains","RubrikSecurityCloudAzureFunctions","Rubrik, Inc","Rubrik Security Cloud data connector","The Rubrik Security Cloud data connector enables security operations teams to integrate insights from Rubrik's Data Observability services into Microsoft Sentinel. The insights include identification of anomalous filesystem behavior associated with ransomware and mass deletion, assess the blast radius of a ransomware attack, and sensitive data operators to prioritize and more rapidly investigate potential incidents.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Rubrik webhook which push its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Rubrik Microsoft Sentinel data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Rubrik connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-RubrikWebhookEvents-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information: \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tAnomaliesTableName \n\t\tRansomwareAnalysisTableName \n\t\tThreatHuntsTableName \n\t\tEventsTableName \n\t\tLogLevel \n \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Rubrik Microsoft Sentinel data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. 
Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-RubrikWebhookEvents-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. RubrikXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. 
In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tAnomaliesTableName\n\t\tRansomwareAnalysisTableName\n\t\tThreatHuntsTableName\n\t\tEventsTableName\n\t\tLogLevel\n\t\tlogAnalyticsUri (optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: https://.ods.opinsights.azure.us. \n4. Once all application settings have been entered, click **Save**.""}, {""title"": """", ""description"": ""**Post Deployment steps**\n\n""}, {""title"": ""1) Get the Function app endpoint"", ""description"": ""1. Go to Azure function Overview page and Click on **\""Functions\""** tab.\n2. Click on the function called **\""RubrikHttpStarter\""**.\n3. Go to **\""GetFunctionurl\""** and copy the function url.""}, {""title"": ""2) Add a webhook in RubrikSecurityCloud to send data to Microsoft Sentinel."", ""description"": ""Follow the Rubrik User Guide instructions to [Add a Webhook](https://docs.rubrik.com/en-us/saas/saas/common/adding_webhook.html) to begin receiving event information \n 1. Select the Microsoft Sentinel as the webhook Provider \n 2. Enter the desired Webhook name \n 3. Enter the URL part from copied Function-url as the webhook URL endpoint and replace **{functionname}** with **\""RubrikAnomalyOrchestrator\""**, for the Rubrik Microsoft Sentinel Solution \n 4. Select the EventType as Anomaly \n 5. Select the following severity levels: Critical, Warning, Informational \n 6. Choose multiple log types, if desired, when running **\""RubrikEventsOrchestrator\""** \n 7. 
Repeat the same steps to add webhooks for Anomaly Detection Analysis, Threat Hunt and Other Events.\n \n\n NOTE: while adding webhooks for Anomaly Detection Analysis, Threat Hunt and Other Events, replace **{functionname}** with **\""RubrikRansomwareOrchestrator\""**, **\""RubrikThreatHuntOrchestrator\""** and **\""RubrikEventsOrchestrator\""** respectively in copied function-url.""}, {""title"": """", ""description"": ""*Now we are done with the Rubrik Webhook configuration. Once the webhook events are triggered, you should be able to see the Anomaly, Anomaly Detection Analysis, Threat Hunt events and Other Events from Rubrik in the respective Log Analytics workspace tables called \""Rubrik_Anomaly_Data_CL\"", \""Rubrik_Ransomware_Data_CL\"", \""Rubrik_ThreatHunt_Data_CL\"", and \""Rubrik_Events_Data_CL\"".*\n\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RubrikSecurityCloud/Data%20Connectors/RubrikWebhookEvents/RubrikWebhookEvents_FunctionApp.json","true" -"","SAP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP","","","","","","","","","","","","","","","","","false","","false" -"","SAP BTP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20BTP","sentinel4sap","sap_btp_sentinel_solution","2023-04-04","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"SAPBTPAuditLog_CL","SAP BTP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20BTP","sentinel4sap","sap_btp_sentinel_solution","2023-04-04","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","SAPBTPAuditEvents","Microsoft","SAP BTP","SAP Business Technology Platform (SAP BTP) brings together data management, analytics, artificial intelligence, application development, automation, and integration in one, unified environment.","[{""description"": ""**Step 1 - Configuration steps for the SAP BTP Audit Retrieval API**\n\nFollow the steps provided by SAP [see Audit Log Retrieval API for Global Accounts in the Cloud Foundry Environment](https://help.sap.com/docs/btp/sap-business-technology-platform/audit-log-retrieval-api-for-global-accounts-in-cloud-foundry-environment/). 
Take a note of the **url** (Audit Retrieval API URL), **uaa.url** (User Account and Authentication Server url) and the associated **uaa.clientid**.\n\n>**NOTE:** You can onboard one or more BTP subaccounts by following the steps provided by SAP [see Audit Log Retrieval API Usage for Subaccounts in the Cloud Foundry Environment](https://help.sap.com/docs/btp/sap-business-technology-platform/audit-log-retrieval-api-usage-for-subaccounts-in-cloud-foundry-environment/). Add a connection for each subaccount.""}, {""description"": ""Connect using OAuth client credentials"", ""title"": ""Connect events from SAP BTP to Microsoft Sentinel"", ""instructions"": [{""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""label"": ""Add account"", ""isPrimary"": true, ""title"": ""BTP connection"", ""instructionSteps"": [{""title"": ""Account Details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Subaccount name (e.g. Contoso). This will be projected to the InstanceName column."", ""placeholder"": ""no space or special character allowed!"", ""type"": ""text"", ""name"": ""subaccountName""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""SAP BTP Client ID"", ""placeholder"": ""Client ID"", ""type"": ""text"", ""name"": ""clientId""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""SAP BTP Client Secret"", ""placeholder"": ""Client Secret"", ""type"": ""password"", ""name"": ""clientSecret""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Authorization server URL (UAA server)"", ""placeholder"": ""https://your-tenant.authentication.region.hana.ondemand.com"", ""type"": ""text"", ""name"": ""authServerUrl""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Audit Retrieval API URL"", ""placeholder"": ""https://auditlog-management.cfapps.region.hana.ondemand.com"", ""type"": ""text"", ""name"": ""auditHost""}}]}]}}]}, {""title"": ""Subaccounts"", ""description"": ""Each row represents 
a connected subaccount"", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Subaccount Name"", ""columnValue"": ""name""}], ""menuItems"": [""DeleteConnector""]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Client Id and Client Secret for Audit Retrieval API"", ""description"": ""Enable API access in BTP.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20BTP/Data%20Connectors/SAPBTPPollerConnector/SAPBTP_DataConnectorDefinition.json","true" -"","SAP ETD Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20ETD%20Cloud","sap_jasondau","azure-sentinel-solution-sapetd","2025-02-17","2025-09-11","","SAP","Partner","https://help.sap.com/docs/SAP_ENTERPRISE_THREAT_DETECTION_CLOUD_EDITION","","domains","","","","","","","false","","false" -"SAPETDAlerts_CL","SAP ETD Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20ETD%20Cloud","sap_jasondau","azure-sentinel-solution-sapetd","2025-02-17","2025-09-11","","SAP","Partner","https://help.sap.com/docs/SAP_ENTERPRISE_THREAT_DETECTION_CLOUD_EDITION","","domains","SAPETDAlerts","SAP","SAP Enterprise Threat Detection, cloud edition","The SAP Enterprise Threat Detection, cloud edition 
(ETD) data connector enables ingestion of security alerts from ETD into Microsoft Sentinel, supporting cross-correlation, alerting, and threat hunting.","[{""description"": ""**Step 1 - Configuration steps for the SAP ETD Audit Retrieval API**\n\nFollow the steps provided by SAP [see ETD docs](https://help.sap.com/docs/ETD/sap-business-technology-platform/audit-log-retrieval-api-for-global-accounts-in-cloud-foundry-environment/). Take a note of the **url** (Audit Retrieval API URL), **uaa.url** (User Account and Authentication Server url) and the associated **uaa.clientid**.\n\n>**NOTE:** You can onboard one or more ETD subaccounts by following the steps provided by SAP [see ETD docs](https://help.sap.com/docs/ETD/sap-business-technology-platform/audit-log-retrieval-api-usage-for-subaccounts-in-cloud-foundry-environment/). Add a connection for each subaccount.\n\n>**TIP:** Use the [shared blog series](https://community.sap.com/t5/enterprise-resource-planning-blog-posts-by-sap/sap-enterprise-threat-detection-cloud-edition-joins-forces-with-microsoft/ba-p/13942075) for additional info.""}, {""description"": ""Connect using OAuth client credentials"", ""title"": ""Connect events from SAP ETD to Microsoft Sentinel"", ""instructions"": [{""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""label"": ""Add account"", ""isPrimary"": true, ""title"": ""ETD connection"", ""instructionSteps"": [{""title"": ""Account Details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""SAP ETD Client ID"", ""placeholder"": ""Client ID"", ""type"": ""text"", ""name"": ""clientId""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""SAP ETD Client Secret"", ""placeholder"": ""Client Secret"", ""type"": ""password"", ""name"": ""clientSecret""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Authorization server URL (UAA server)"", ""placeholder"": 
""https://your-tenant.authentication.region.hana.ondemand.com/oauth/token"", ""type"": ""text"", ""name"": ""authServerUrl""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""SAP ETD data retrieval API URL"", ""placeholder"": ""https://your-etd-cloud-data-retrieval-service.cfapps.region.hana.ondemand.com"", ""type"": ""text"", ""name"": ""etdHost""}}]}]}}]}, {""title"": ""ETD accounts"", ""description"": ""Each row represents a connected ETD account"", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Data retrieval endpoint"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Client Id and Client Secret for ETD Retrieval API"", ""description"": ""Enable API access in ETD.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20ETD%20Cloud/Data%20Connectors/SAPETD_PUSH_CCP/SAPETD_connectorDefinition.json","true" -"SAPETDInvestigations_CL","SAP ETD Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20ETD%20Cloud","sap_jasondau","azure-sentinel-solution-sapetd","2025-02-17","2025-09-11","","SAP","Partner","https://help.sap.com/docs/SAP_ENTERPRISE_THREAT_DETECTION_CLOUD_EDITION","","domains","SAPETDAlerts","SAP","SAP Enterprise Threat Detection, cloud edition","The SAP Enterprise Threat Detection, cloud edition (ETD) data connector enables ingestion of security alerts from ETD into Microsoft Sentinel, supporting cross-correlation, alerting, and threat hunting.","[{""description"": ""**Step 1 - Configuration steps for the SAP ETD Audit Retrieval API**\n\nFollow the steps provided by SAP [see ETD docs](https://help.sap.com/docs/ETD/sap-business-technology-platform/audit-log-retrieval-api-for-global-accounts-in-cloud-foundry-environment/). Take a note of the **url** (Audit Retrieval API URL), **uaa.url** (User Account and Authentication Server url) and the associated **uaa.clientid**.\n\n>**NOTE:** You can onboard one or more ETD subaccounts by following the steps provided by SAP [see ETD docs](https://help.sap.com/docs/ETD/sap-business-technology-platform/audit-log-retrieval-api-usage-for-subaccounts-in-cloud-foundry-environment/). 
Add a connection for each subaccount.\n\n>**TIP:** Use the [shared blog series](https://community.sap.com/t5/enterprise-resource-planning-blog-posts-by-sap/sap-enterprise-threat-detection-cloud-edition-joins-forces-with-microsoft/ba-p/13942075) for additional info.""}, {""description"": ""Connect using OAuth client credentials"", ""title"": ""Connect events from SAP ETD to Microsoft Sentinel"", ""instructions"": [{""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""label"": ""Add account"", ""isPrimary"": true, ""title"": ""ETD connection"", ""instructionSteps"": [{""title"": ""Account Details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""SAP ETD Client ID"", ""placeholder"": ""Client ID"", ""type"": ""text"", ""name"": ""clientId""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""SAP ETD Client Secret"", ""placeholder"": ""Client Secret"", ""type"": ""password"", ""name"": ""clientSecret""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Authorization server URL (UAA server)"", ""placeholder"": ""https://your-tenant.authentication.region.hana.ondemand.com/oauth/token"", ""type"": ""text"", ""name"": ""authServerUrl""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""SAP ETD data retrieval API URL"", ""placeholder"": ""https://your-etd-cloud-data-retrieval-service.cfapps.region.hana.ondemand.com"", ""type"": ""text"", ""name"": ""etdHost""}}]}]}}]}, {""title"": ""ETD accounts"", ""description"": ""Each row represents a connected ETD account"", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Data retrieval endpoint"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", 
""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Client Id and Client Secret for ETD Retrieval API"", ""description"": ""Enable API access in ETD.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20ETD%20Cloud/Data%20Connectors/SAPETD_PUSH_CCP/SAPETD_connectorDefinition.json","true" -"","SAP LogServ","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20LogServ","sap_jasondau","azure-sentinel-solution-saplogserv","2025-02-17","2025-07-18","","SAP","Partner","https://community.sap.com/t5/enterprise-resource-planning-blogs-by-sap/announcing-limited-preview-of-sap-logserv-integration-with-microsoft/ba-p/13942180","","domains","","","","","","","false","","false" -"SAPLogServ_CL","SAP LogServ","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20LogServ","sap_jasondau","azure-sentinel-solution-saplogserv","2025-02-17","2025-07-18","","SAP","Partner","https://community.sap.com/t5/enterprise-resource-planning-blogs-by-sap/announcing-limited-preview-of-sap-logserv-integration-with-microsoft/ba-p/13942180","","domains","SAPLogServ","SAP SE","SAP LogServ (RISE), S/4HANA Cloud private edition","SAP LogServ is an SAP Enterprise Cloud Services (ECS) service aimed at collection, storage, forwarding and access of logs. LogServ centralizes the logs from all systems, applications, and ECS services used by a registered customer.
Main Features include:
Near Realtime Log Collection: With ability to integrate into Microsoft Sentinel as SIEM solution.
LogServ complements the existing SAP application layer threat monitoring and detections in Microsoft Sentinel with the log types owned by SAP ECS as the system provider. This includes logs like: SAP Security Audit Log (AS ABAP), HANA database, AS JAVA, ICM, SAP Web Dispatcher, SAP Cloud Connector, OS, SAP Gateway, 3rd party Database, Network, DNS, Proxy, Firewall","[{""title"": ""1. Create ARM Resources and Provide the Required Permissions"", ""description"": ""We will create data collection rule (DCR) and data collection endpoint (DCE) resources. We will also create a Microsoft Entra app registration and assign the required permissions to it."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated deployment of Azure resources\nClicking on \""Deploy push connector resources\"" will trigger the creation of DCR and DCE resources.\nIt will then create a Microsoft Entra app registration with client secret and grant permissions on the DCR. This setup enables data to be sent securely to the DCR using a OAuth v2 client credentials.""}}, {""parameters"": {""label"": ""Deploy push connector resources"", ""applicationDisplayName"": ""SAP LogServ push to Microsoft Sentinel""}, ""type"": ""DeployPushConnectorButton_test""}]}, {""title"": ""2. 
Maintain the data collection endpoint details and authentication info in SAP LogServ"", ""description"": ""Share the data collection endpoint URL and authentication info with the SAP LogServ administrator to configure the SAP LogServ to send data to the data collection endpoint.\n\nLearn more from [this blog series](https://community.sap.com/t5/enterprise-resource-planning-blog-posts-by-members/ultimate-blog-series-sap-logserv-integration-with-microsoft-sentinel/ba-p/14126401)."", ""instructions"": [{""parameters"": {""label"": ""Use this value to configure as Tenant ID in the LogIngestionAPI credential."", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra Application Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the Application Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Use this value to configure the LogsIngestionURL parameter when deploying the IFlow."", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the DCE URI""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""DCR Immutable ID"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the DCR ID""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rules. Typically requires Azure RBAC Owner or User Access Administrator role.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20LogServ/Data%20Connectors/SAPLogServ.json;https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20LogServ/Data%20Connectors/SAPLogServ_PUSH_CCP/SAPLogServ_connectorDefinition.json","false" -"","SAP S4 Cloud Public Edition","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20S4%20Cloud%20Public%20Edition","sap_jasondau","azure-sentinel-solution-s4hana-public","2025-09-12","","","SAP","Partner","https://api.sap.com/api/SecurityAuditLog_ODataService/overview","","domains","","","","","","","false","","false" -"ABAPAuditLog","SAP S4 Cloud Public Edition","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20S4%20Cloud%20Public%20Edition","sap_jasondau","azure-sentinel-solution-s4hana-public","2025-09-12","","","SAP","Partner","https://api.sap.com/api/SecurityAuditLog_ODataService/overview","","domains","SAPS4PublicAlerts","SAP","SAP S/4HANA Cloud Public Edition","The SAP S/4HANA Cloud Public Edition (GROW with SAP) data connector enables ingestion of SAP's security audit log into the Microsoft Sentinel Solution for SAP, supporting cross-correlation, alerting, and threat hunting. Looking for alternative authentication mechanisms? 
See [here](https://github.com/Azure-Samples/Sentinel-For-SAP-Community/tree/main/integration-artifacts).","[{""description"": ""**Step 1 - Configuration steps for SAP S/4HANA Cloud Public Edition**\n\nTo connect to SAP S/4HANA Cloud Public Edition, you will need:\n\n1. Configure a communication arrangement for communication scenario **[SAP_COM_0750](https://help.sap.com/docs/SAP_S4HANA_CLOUD/0f69f8fb28ac4bf48d2b57b9637e81fa/a93dca70e2ce43d19ac93e3e5531e37d.html)** \n\n2. SAP S/4HANA Cloud Public Edition tenant **API URL**\n3. Valid **communication user (username and password)** for your SAP S/4HANA Cloud system\n4. **Appropriate authorizations** to access audit log data via OData services\n\n>**NOTE:** This connector supports Basic authentication. Looking for alternative authentication mechanisms? See [here](https://github.com/Azure-Samples/Sentinel-For-SAP-Community/tree/main/integration-artifacts)""}, {""description"": ""Connect using Basic authentication"", ""title"": ""Connect events from SAP S/4HANA Cloud Public Edition to Microsoft Sentinel Solution for SAP"", ""instructions"": [{""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""label"": ""Add account"", ""isPrimary"": true, ""title"": ""S/4HANA Cloud Public Edition connection"", ""instructionSteps"": [{""title"": ""Account Details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Username"", ""placeholder"": ""Enter your SAP S/4HANA Cloud username"", ""type"": ""text"", ""name"": ""username""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Password"", ""placeholder"": ""Enter your SAP S/4HANA Cloud password"", ""type"": ""password"", ""name"": ""password""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""SAP S/4HANA Cloud API URL"", ""placeholder"": ""https://my123456-api.s4hana.cloud.sap"", ""type"": ""text"", ""name"": ""s4hanaHost""}}]}]}}]}, {""title"": ""S/4HANA Cloud Public Edition connections"", ""description"": 
""Each row represents a connected S/4HANA Cloud Public Edition system"", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""S/4HANA Cloud API endpoint"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Client Id and Client Secret for Audit Retrieval API"", ""description"": ""Enable API access in BTP.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20S4%20Cloud%20Public%20Edition/Data%20Connectors/SAPS4PublicPollerConnector/SAPS4Public_connectorDefinition.json","true" -"","SIGNL4","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SIGNL4","derdack","azure-sentinel-solution-signl4","2021-12-10","2021-12-10","","Derdack","Partner","https://www.signl4.com","","domains","","","","","","","false","","false" -"SIGNL4_CL","SIGNL4","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SIGNL4","derdack","azure-sentinel-solution-signl4","2021-12-10","2021-12-10","","Derdack","Partner","https://www.signl4.com","","domains","DerdackSIGNL4","Derdack","Derdack SIGNL4","When critical systems fail or security incidents happen, SIGNL4 bridges the ‘last mile’ to your staff, 
engineers, IT admins and workers in the field. It adds real-time mobile alerting to your services, systems, and processes in no time. SIGNL4 notifies through persistent mobile push, SMS text and voice calls with acknowledgement, tracking and escalation. Integrated duty and shift scheduling ensure the right people are alerted at the right time.

[Learn more >](https://www.signl4.com)","[{""title"": """", ""description"": "">**NOTE:** This data connector is mainly configured on the SIGNL4 side. You can find a description video here: [**Integrate SIGNL4 with Microsoft Sentinel**](https://www.signl4.com/blog/portfolio_item/azure-sentinel-mobile-alert-notification-duty-schedule-escalation/)."", ""instructions"": []}, {""title"": """", ""description"": "">**SIGNL4 Connector:** The SIGNL4 connector for Microsoft Sentinel, Azure Security Center and other Azure Graph Security API providers provides seamless 2-way integration with your Azure Security solutions. Once added to your SIGNL4 team, the connector will read security alerts from Azure Graph Security API and fully automatically and trigger alert notifications to your team members on duty. It will also synchronize the alert status from SIGNL4 to Graph Security API, so that if alerts are acknowledged or closed, this status is also updated on the according Azure Graph Security API alert or the corresponding security provider. As mentioned, the connector mainly uses Azure Graph Security API, but for some security providers, such as Microsoft Sentinel, it also uses dedicated REST APIs from according Azure solutions."", ""instructions"": []}, {""title"": ""Microsoft Sentinel Features"", ""description"": ""Microsoft Sentinel is a cloud native SIEM solution from Microsoft and a security alert provider in Azure Graph Security API. However, the level of alert details available with the Graph Security API is limited for Microsoft Sentinel. The connector can therefore augment alerts with further details (insights rule search results), from the underlying Microsoft Sentinel Log Analytics workspace. To be able to do that, the connector communicates with Azure Log Analytics REST API and needs according permissions (see below). Furthermore, the app can also update the status of Microsoft Sentinel incidents, when all related security alerts are e.g. in progress or resolved. 
In order to be able to do that, the connector needs to be a member of the 'Microsoft Sentinel Contributors' group in your Azure Subscription.\n **Automated deployment in Azure**\n The credentials required to access the beforementioned APIs, are generated by a small PowerShell script that you can download below. The script performs the following tasks for you:\n - Logs you on to your Azure Subscription (please login with an administrator account)\n - Creates a new enterprise application for this connector in your Azure AD, also referred to as service principal\n - Creates a new role in your Azure IAM that grants read/query permission to only Azure Log Analytics workspaces.\n - Joins the enterprise application to that user role\n - Joins the enterprise application to the 'Microsoft Sentinel Contributors' role\n - Outputs some data that you need to configure app (see below)"", ""instructions"": []}, {""title"": ""Deployment procedure"", ""description"": ""1. Download the PowerShell deployment script from [here](https://github.com/signl4/signl4-integration-azuresentinel/blob/master/registerSIGNL4Client.ps1).\n2. Review the script and the roles and permission scopes it deploys for the new app registration. If you don't want to use the connector with Microsoft Sentinel, you could remove all role creation and role assignment code and only use it to create the app registration (SPN) in your Azure Active Directory.\n3. Run the script. At the end it outputs information that you need to enter in the connector app configuration.\n4. In Azure AD, click on 'App Registrations'. Find the app with the name 'SIGNL4AzureSecurity' and open its details\n5. On the left menu blade click 'API Permissions'. Then click 'Add a permission'.\n6. On the blade that loads, under 'Microsoft APIs' click on the 'Microsoft Graph' tile, then click 'App permission'.\n7. In the table that is displayed expand 'SecurityEvents' and check 'SecurityEvents.Read.All' and 'SecurityEvents.ReadWrite.All'.\n8. 
Click 'Add permissions'."", ""instructions"": []}, {""title"": ""Configuring the SIGNL4 connector app"", ""description"": ""Finally, enter the IDs, that the script has outputted in the connector configuration:\n - Azure Tenant ID\n - Azure Subscription ID\n - Client ID (of the enterprise application)\n - Client Secret (of the enterprise application)\n Once the app is enabled, it will start reading your Azure Graph Security API alerts.\n\n>**NOTE:** It will initially only read the alerts that have occurred within the last 24 hours."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SIGNL4/Data%20Connectors/DerdackSIGNL4.json","true" -"SecurityIncident","SIGNL4","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SIGNL4","derdack","azure-sentinel-solution-signl4","2021-12-10","2021-12-10","","Derdack","Partner","https://www.signl4.com","","domains","DerdackSIGNL4","Derdack","Derdack SIGNL4","When critical systems fail or security incidents happen, SIGNL4 bridges the ‘last mile’ to your staff, engineers, IT admins and workers in the field. 
It adds real-time mobile alerting to your services, systems, and processes in no time. SIGNL4 notifies through persistent mobile push, SMS text and voice calls with acknowledgement, tracking and escalation. Integrated duty and shift scheduling ensure the right people are alerted at the right time.

[Learn more >](https://www.signl4.com)","[{""title"": """", ""description"": "">**NOTE:** This data connector is mainly configured on the SIGNL4 side. You can find a description video here: [**Integrate SIGNL4 with Microsoft Sentinel**](https://www.signl4.com/blog/portfolio_item/azure-sentinel-mobile-alert-notification-duty-schedule-escalation/)."", ""instructions"": []}, {""title"": """", ""description"": "">**SIGNL4 Connector:** The SIGNL4 connector for Microsoft Sentinel, Azure Security Center and other Azure Graph Security API providers provides seamless 2-way integration with your Azure Security solutions. Once added to your SIGNL4 team, the connector will read security alerts from Azure Graph Security API and fully automatically and trigger alert notifications to your team members on duty. It will also synchronize the alert status from SIGNL4 to Graph Security API, so that if alerts are acknowledged or closed, this status is also updated on the according Azure Graph Security API alert or the corresponding security provider. As mentioned, the connector mainly uses Azure Graph Security API, but for some security providers, such as Microsoft Sentinel, it also uses dedicated REST APIs from according Azure solutions."", ""instructions"": []}, {""title"": ""Microsoft Sentinel Features"", ""description"": ""Microsoft Sentinel is a cloud native SIEM solution from Microsoft and a security alert provider in Azure Graph Security API. However, the level of alert details available with the Graph Security API is limited for Microsoft Sentinel. The connector can therefore augment alerts with further details (insights rule search results), from the underlying Microsoft Sentinel Log Analytics workspace. To be able to do that, the connector communicates with Azure Log Analytics REST API and needs according permissions (see below). Furthermore, the app can also update the status of Microsoft Sentinel incidents, when all related security alerts are e.g. in progress or resolved. 
In order to be able to do that, the connector needs to be a member of the 'Microsoft Sentinel Contributors' group in your Azure Subscription.\n **Automated deployment in Azure**\n The credentials required to access the beforementioned APIs, are generated by a small PowerShell script that you can download below. The script performs the following tasks for you:\n - Logs you on to your Azure Subscription (please login with an administrator account)\n - Creates a new enterprise application for this connector in your Azure AD, also referred to as service principal\n - Creates a new role in your Azure IAM that grants read/query permission to only Azure Log Analytics workspaces.\n - Joins the enterprise application to that user role\n - Joins the enterprise application to the 'Microsoft Sentinel Contributors' role\n - Outputs some data that you need to configure app (see below)"", ""instructions"": []}, {""title"": ""Deployment procedure"", ""description"": ""1. Download the PowerShell deployment script from [here](https://github.com/signl4/signl4-integration-azuresentinel/blob/master/registerSIGNL4Client.ps1).\n2. Review the script and the roles and permission scopes it deploys for the new app registration. If you don't want to use the connector with Microsoft Sentinel, you could remove all role creation and role assignment code and only use it to create the app registration (SPN) in your Azure Active Directory.\n3. Run the script. At the end it outputs information that you need to enter in the connector app configuration.\n4. In Azure AD, click on 'App Registrations'. Find the app with the name 'SIGNL4AzureSecurity' and open its details\n5. On the left menu blade click 'API Permissions'. Then click 'Add a permission'.\n6. On the blade that loads, under 'Microsoft APIs' click on the 'Microsoft Graph' tile, then click 'App permission'.\n7. In the table that is displayed expand 'SecurityEvents' and check 'SecurityEvents.Read.All' and 'SecurityEvents.ReadWrite.All'.\n8. 
Click 'Add permissions'."", ""instructions"": []}, {""title"": ""Configuring the SIGNL4 connector app"", ""description"": ""Finally, enter the IDs, that the script has outputted in the connector configuration:\n - Azure Tenant ID\n - Azure Subscription ID\n - Client ID (of the enterprise application)\n - Client Secret (of the enterprise application)\n Once the app is enabled, it will start reading your Azure Graph Security API alerts.\n\n>**NOTE:** It will initially only read the alerts that have occurred within the last 24 hours."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SIGNL4/Data%20Connectors/DerdackSIGNL4.json","true" -"","SINEC Security Guard","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SINEC%20Security%20Guard","siemensplmsoftware","azure-sentinel-solution-ssg","2024-07-15","","","Siemens AG","Partner","https://siemens.com/sinec-security-guard","","domains,verticals","","","","","","","false","","false" -"SINECSecurityGuard_CL","SINEC Security Guard","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SINEC%20Security%20Guard","siemensplmsoftware","azure-sentinel-solution-ssg","2024-07-15","","","Siemens AG","Partner","https://siemens.com/sinec-security-guard","","domains,verticals","SSG","Siemens AG","SINEC Security Guard","The SINEC Security Guard solution for Microsoft Sentinel allows you to ingest security events of your industrial networks from the [SINEC Security Guard](https://siemens.com/sinec-security-guard) into Microsoft Sentinel","[{""description"": ""This Data Connector relies on the SINEC Security Guard Sensor Package to be able to receive Sensor events in Microsoft Sentinel. The Sensor Package can be purchased in the Siemens Xcelerator Marketplace."", ""instructions"": [{""parameters"": {""title"": ""1. 
Please follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Set up the SINEC Security Guard Sensor"", ""description"": ""Detailed step for setting up the sensor.""}, {""title"": ""Create the Data Connector and configure it in the SINEC Security Guard web interface"", ""description"": ""Instructions on configuring the data connector.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SINEC%20Security%20Guard/Data%20Connectors/data_connector_GenericUI.json","true" -"","SOC Handbook","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SOC%20Handbook","microsoftsentinelcommunity","azure-sentinel-solution-sochandbook","2022-11-30","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","","","","","","","false","","false" -"","SOC Prime CCF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SOC%20Prime%20CCF","socprimeinc1666873751297","azure-sentinel-solution-socprimeauditccp","2025-09-25","","","SOC Prime","Partner","https://socprime.com/","","domains","","","","","","","false","","false" -"SOCPrimeAuditLogs_CL","SOC Prime 
CCF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SOC%20Prime%20CCF","socprimeinc1666873751297","azure-sentinel-solution-socprimeauditccp","2025-09-25","","","SOC Prime","Partner","https://socprime.com/","","domains","SOCPrimeAuditLogsDataConnector","Microsoft","SOC Prime Platform Audit Logs Data Connector","The [SOC Prime Audit Logs](https://tdm.socprime.com/login) data connector allows ingesting logs from the SOC Prime Platform API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. It uses the SOC Prime Platform API to fetch SOC Prime platform audit logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the SOC Prime Platform API \n Follow the instructions to obtain the credentials. You can also follow this [guide](https://tdm.socprime.com/login) to generate a personal API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Retrieve API Key\n 1. Log in to the SOC Prime Platform\n 2. Click [**Account**] icon -> [**Platform Settings**] -> [**API**] \n 3. Click [**Add New Key**] \n 4. In the modal that appears give your key a meaningful name, set expiration date and product APIs the key provides access to \n 5. Click on [**Generate**] \n 6. Copy the key and save it in a safe place. 
You won't be able to view it again once you close this modal ""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""SOC Prime API Key"", ""placeholder"": ""API Key"", ""type"": ""password"", ""name"": ""apitoken""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SOC%20Prime%20CCF/Data%20Connectors/SOCPrime_ccp/SOCPrime_DataConnectorDefinition.json","true" -"","SOC-Process-Framework","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SOC-Process-Framework","azuresentinel","azure-sentinel-solution-socprocessframework","2022-04-08","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","SOX IT Compliance","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SOX%20IT%20Compliance","azuresentinel","azure-sentinel-solution-sox-it-compliance","2025-12-11","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","SailPointIdentityNow","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SailPointIdentityNow","sailpoint1582673310610","sentinel_offering","2021-10-26","","","SailPoint","Partner","","","domains","","","","","","","false","","false" 
-"SailPointIDN_Events_CL","SailPointIdentityNow","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SailPointIdentityNow","sailpoint1582673310610","sentinel_offering","2021-10-26","","","SailPoint","Partner","","","domains","SailPointIdentityNow","SailPoint","SailPoint IdentityNow","The [SailPoint](https://www.sailpoint.com/) IdentityNow data connector provides the capability to ingest [SailPoint IdentityNow] search events into Microsoft Sentinel through the REST API. The connector provides customers the ability to extract audit information from their IdentityNow tenant. It is intended to make it even easier to bring IdentityNow user activity and governance events into Microsoft Sentinel to improve insights from your security incident and event monitoring solution.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the SailPoint IdentityNow REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the SailPoint IdentityNow API**\n\n [Follow the instructions](https://community.sailpoint.com/t5/IdentityNow-Articles/Best-Practice-Using-Personal-Access-Tokens-in-IdentityNow/ta-p/150471) to obtain the credentials. 
\n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the SailPoint IdentityNow data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the SailPoint IdentityNow data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-sailpointidentitynow-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter other information and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the SailPoint IdentityNow data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-sailpointidentitynow-functionapp) file. 
Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. searcheventXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.9.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tTENANT_ID\n\t\tSHARED_KEY\n\t\tLIMIT\n\t\tGRANT_TYPE\n\t\tCUSTOMER_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tAZURE_STORAGE_ACCESS_KEY\n\t\tAZURE_STORAGE_ACCOUNT_NAME\n\t\tAzureWebJobsStorage\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SailPoint IdentityNow API Authentication Credentials"", ""description"": ""TENANT_ID, CLIENT_ID and CLIENT_SECRET are required for authentication.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SailPointIdentityNow/Data%20Connectors/SailPoint_IdentityNow_FunctionApp.json","true" -"SailPointIDN_Triggers_CL","SailPointIdentityNow","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SailPointIdentityNow","sailpoint1582673310610","sentinel_offering","2021-10-26","","","SailPoint","Partner","","","domains","SailPointIdentityNow","SailPoint","SailPoint IdentityNow","The [SailPoint](https://www.sailpoint.com/) IdentityNow data connector provides the capability to ingest [SailPoint IdentityNow] search events into Microsoft Sentinel through the REST API. The connector provides customers the ability to extract audit information from their IdentityNow tenant. It is intended to make it even easier to bring IdentityNow user activity and governance events into Microsoft Sentinel to improve insights from your security incident and event monitoring solution.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the SailPoint IdentityNow REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the SailPoint IdentityNow API**\n\n [Follow the instructions](https://community.sailpoint.com/t5/IdentityNow-Articles/Best-Practice-Using-Personal-Access-Tokens-in-IdentityNow/ta-p/150471) to obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the SailPoint IdentityNow data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the SailPoint IdentityNow data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-sailpointidentitynow-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter other information and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the SailPoint IdentityNow data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-sailpointidentitynow-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. searcheventXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.9.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tTENANT_ID\n\t\tSHARED_KEY\n\t\tLIMIT\n\t\tGRANT_TYPE\n\t\tCUSTOMER_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tAZURE_STORAGE_ACCESS_KEY\n\t\tAZURE_STORAGE_ACCOUNT_NAME\n\t\tAzureWebJobsStorage\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SailPoint IdentityNow API Authentication Credentials"", ""description"": ""TENANT_ID, CLIENT_ID and CLIENT_SECRET are required for authentication.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SailPointIdentityNow/Data%20Connectors/SailPoint_IdentityNow_FunctionApp.json","true" -"","SalemCyber","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SalemCyber","saleminc1627928803559","salem-cyber-ai-analyst","2023-07-21","2023-07-21","","Salem Cyber","Partner","https://www.salemcyber.com/contact","","domains","","","","","","","false","","false" -"","Salesforce Service Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Salesforce%20Service%20Cloud","azuresentinel","azure-sentinel-solution-salesforceservicecloud","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"SalesforceServiceCloudV2_CL","Salesforce Service Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Salesforce%20Service%20Cloud","azuresentinel","azure-sentinel-solution-salesforceservicecloud","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","SalesforceServiceCloud","Salesforce","[DEPRECATED] Salesforce Service Cloud","The Salesforce Service Cloud data connector provides the capability to ingest information about your Salesforce 
operational events into Microsoft Sentinel through the REST API. The connector provides ability to review events in your org on an accelerated basis, get [event log files](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/event_log_file_hourly_overview.htm) in hourly increments for recent activity.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Salesforce Lightning Platform REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias SalesforceServiceCloud and load the function code or click [here](https://aka.ms/sentinel-SalesforceServiceCloud-parser). The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Salesforce Lightning Platform REST API**\n\n1. See the [link](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/quickstart.htm) and follow the instructions for obtaining Salesforce API Authorization credentials. \n2. On the **Set Up Authorization** step choose **Session ID Authorization** method.\n3. You must provide your client id, client secret, username, and password with user security token.""}, {""title"": """", ""description"": "">**NOTE:** Ingesting data on an hourly interval may require additional licensing based on the edition of the Salesforce Service Cloud being used. 
Please refer to [Salesforce documentation](https://www.salesforce.com/editions-pricing/service-cloud/) and/or support for more details.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Salesforce Service Cloud data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Salesforce API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Salesforce Service Cloud data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SalesforceServiceCloud-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **Salesforce API Username**, **Salesforce API Password**, **Salesforce Security Token**, **Salesforce Consumer Key**, **Salesforce Consumer Secret** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Salesforce Service Cloud data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-SalesforceServiceCloud-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tSalesforceUser\n\t\tSalesforcePass\n\t\tSalesforceSecurityToken\n\t\tSalesforceConsumerKey\n\t\tSalesforceConsumerSecret\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`\n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Salesforce API Username**, **Salesforce API Password**, **Salesforce Security Token**, **Salesforce Consumer Key**, **Salesforce Consumer Secret** is required for REST API. 
[See the documentation to learn more about API](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/quickstart.htm).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Salesforce%20Service%20Cloud/Data%20Connectors/SalesforceServiceCloud_API_FunctionApp.json","true" -"SalesforceServiceCloud_CL","Salesforce Service Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Salesforce%20Service%20Cloud","azuresentinel","azure-sentinel-solution-salesforceservicecloud","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","SalesforceServiceCloud","Salesforce","[DEPRECATED] Salesforce Service Cloud","The Salesforce Service Cloud data connector provides the capability to ingest information about your Salesforce operational events into Microsoft Sentinel through the REST API. The connector provides ability to review events in your org on an accelerated basis, get [event log files](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/event_log_file_hourly_overview.htm) in hourly increments for recent activity.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Salesforce Lightning Platform REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias SalesforceServiceCloud and load the function code or click [here](https://aka.ms/sentinel-SalesforceServiceCloud-parser). The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Salesforce Lightning Platform REST API**\n\n1. See the [link](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/quickstart.htm) and follow the instructions for obtaining Salesforce API Authorization credentials. \n2. On the **Set Up Authorization** step choose **Session ID Authorization** method.\n3. You must provide your client id, client secret, username, and password with user security token.""}, {""title"": """", ""description"": "">**NOTE:** Ingesting data on an hourly interval may require additional licensing based on the edition of the Salesforce Service Cloud being used. 
Please refer to [Salesforce documentation](https://www.salesforce.com/editions-pricing/service-cloud/) and/or support for more details.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Salesforce Service Cloud data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Salesforce API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Salesforce Service Cloud data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SalesforceServiceCloud-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **Salesforce API Username**, **Salesforce API Password**, **Salesforce Security Token**, **Salesforce Consumer Key**, **Salesforce Consumer Secret** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Salesforce Service Cloud data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-SalesforceServiceCloud-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tSalesforceUser\n\t\tSalesforcePass\n\t\tSalesforceSecurityToken\n\t\tSalesforceConsumerKey\n\t\tSalesforceConsumerSecret\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`\n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Salesforce API Username**, **Salesforce API Password**, **Salesforce Security Token**, **Salesforce Consumer Key**, **Salesforce Consumer Secret** is required for REST API. 
[See the documentation to learn more about API](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/quickstart.htm).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Salesforce%20Service%20Cloud/Data%20Connectors/SalesforceServiceCloud_API_FunctionApp.json","true" -"SalesforceServiceCloudV2_CL","Salesforce Service Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Salesforce%20Service%20Cloud","azuresentinel","azure-sentinel-solution-salesforceservicecloud","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","SalesforceServiceCloudCCPDefinition","Microsoft","Salesforce Service Cloud (via Codeless Connector Framework)","The Salesforce Service Cloud data connector provides the capability to ingest information about your Salesforce operational events into Microsoft Sentinel through the REST API. The connector provides ability to review events in your org on an accelerated basis, get [event log files](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/event_log_file_hourly_overview.htm) in hourly increments for recent activity.","[{""title"": ""Connect to Salesforce Service Cloud API to start collecting event logs in Microsoft Sentinel"", ""description"": ""Follow [Create a Connected App in Salesforce for OAuth](https://help.salesforce.com/s/articleView?id=platform.ev_relay_create_connected_app.htm&type=5) and [Configure a Connected App for the OAuth 2.0 Client Credentials Flow](https://help.salesforce.com/s/articleView?id=xcloud.connected_app_client_credentials_setup.htm&type=5) to create a Connected App with access to the Salesforce Service Cloud API. Through those instructions, you should get the Consumer Key and Consumer Secret.\n For Salesforce Domain name, Go to Setup, type My Domain in the Quick Find box, and select My Domain to view your domain details. 
Make sure to enter the domain name without a trailing slash (e.g., https://your-domain.my.salesforce.com). Fill the form below with that information."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Salesforce Domain Name"", ""placeholder"": ""Salesforce Domain Name"", ""type"": ""text"", ""name"": ""salesforceDomainName"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Log Collection Interval"", ""name"": ""queryType"", ""options"": [{""key"": ""SELECT Id,EventType,LogDate,Interval,CreatedDate,LogFile,LogFileLength FROM EventLogFile WHERE Interval='Hourly' and CreatedDate>{_QueryWindowStartTime} and CreatedDate<{_QueryWindowEndTime}"", ""text"": ""Hourly""}, {""key"": ""SELECT Id,EventType,LogDate,CreatedDate,LogFile,LogFileLength FROM EventLogFile WHERE CreatedDate>{_QueryWindowStartTime} and CreatedDate<{_QueryWindowEndTime}"", ""text"": ""Daily""}], ""placeholder"": ""Select an interval type"", ""isMultiSelect"": false, ""required"": true}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Consumer Key"", ""clientSecretLabel"": ""Consumer Secret"", ""clientIdPlaceholder"": ""Enter Connected App Consumer Key"", ""clientSecretPlaceholder"": ""Enter Connected App Consumer Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}], ""customs"": [{""name"": ""Salesforce Service Cloud API access"", ""description"": ""Access to the Salesforce Service Cloud API through a Connected App is 
required.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Salesforce%20Service%20Cloud/Data%20Connectors/SalesforceSentinelConnector_CCP/SalesforceServiceCloud_DataConnectorDefinition.json","true" -"","Samsung Knox Asset Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence","samsungelectronics1632791654245","azure-sentinel-solution-samsung-knox-kai","2025-01-15","","","Samsung Electronics Co., Ltd.","Partner","https://www2.samsungknox.com/en/support","","domains","","","","","","","false","","false" -"Samsung_Knox_Application_CL","Samsung Knox Asset Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence","samsungelectronics1632791654245","azure-sentinel-solution-samsung-knox-kai","2025-01-15","","","Samsung Electronics Co., Ltd.","Partner","https://www2.samsungknox.com/en/support","","domains","SamsungDCDefinition","Samsung","Samsung Knox Asset Intelligence","Samsung Knox Asset Intelligence Data Connector lets you centralize your mobile security events and logs in order to view customized insights using the Workbook template, and identify incidents based on Analytics Rules templates.","[{""title"": """", ""description"": ""This Data Connector uses the Microsoft Log Ingestion API to push security events into Microsoft Sentinel from Samsung Knox Asset Intelligence (KAI) solution.""}, {""title"": ""STEP 1 - Create and register an Entra Application"", ""description"": "">**Note**: This Data Connector can support either Certificate-based or Client Secret-based authentication. For Certificate-based authentication, you can download the Samsung CA-signed certificate (public key) from [KAI documentation portal](https://docs.samsungknox.com/admin/knox-asset-intelligence/assets/samsung-knox-validation-certificate.crt). For Client Secret-based authentication, you can create the secret during the Entra application registration. 
Ensure you copy the Client Secret value as soon as it is generated.\n\n>**IMPORTANT:** Save the values for Tenant (Directory) ID and Client (Application) ID. If Client Secret-based authentication is enabled, save Client Secret (Secret Value) associated with the Entra app.""}, {""title"": ""STEP 2 - Automate deployment of this Data Connector using the below Azure Resource Manager (ARM) template"", ""description"": "">**IMPORTANT:** Before deploying the Data Connector, copy the below Workspace name associated with your Microsoft Sentinel (also your Log Analytics) instance."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""1. Click the button below to install Samsung Knox Intelligence Solution. \n\n\t[![DeployToAzure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SamsungDCDefinition-azuredeploy)\\n2. Provide the following required fields: Log Analytics Workspace Name, Log Analytics Workspace Location, Log Analytics Workspace Subscription (ID) and Log Analytics Workspace Resource Group.""}, {""title"": ""STEP 3 - Obtain Microsoft Sentinel Data Collection details"", ""description"": ""Once the ARM template is deployed, navigate to Data Collection Rules https://portal.azure.com/#browse/microsoft.insights%2Fdatacollectionrules? and save values associated with the Immutable ID (DCR) and Data Collection Endpoint (DCE). \n\n>**IMPORTANT:** To enable end-to-end integration, information related to Microsoft Sentinel DCE and DCR are required for configuration in Samsung Knox Asset Intelligence portal (STEP 4). \n\nEnsure the Entra Application created in STEP 1 has permissions to use the DCR created in order to send data to the DCE. 
Please refer to https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#assign-permissions-to-the-dcr to assign permissions accordingly.""}, {""title"": ""STEP 4 - Connect to Samsung Knox Asset Intelligence solution to configure Microsoft Sentinel to push select Knox Security Events as Alerts"", ""description"": ""1. Login to [Knox Asset Intelligence administration portal](https://central.samsungknox.com/kaiadmin/dai/home) and navigate to **Dashboard Settings**; this is available at the top-right corner of the Portal.\n> **Note**: Ensure the login user has access to 'Security' and 'Manage dashboard view and data collection' permissions.\n\n2. Click on Security tab to view settings for Microsoft Sentinel Integration and Knox Security Logs.\n\n3. In the Security Operations Integration page, toggle on **'Enable Microsoft Sentinel Integration'** and enter appropriate values in the required fields.\n\n >a. Based on the authentication method used, refer to information saved from STEP 1 while registering the Entra application. \n\n >b. For Microsoft Sentinel DCE and DCR, refer to the information saved from STEP 3. \n\n4. Click on **'Test Connection'** and ensure the connection is successful.\n\n5. Before you can Save, configure Knox Security Logs by selecting either Essential or Advanced configuration **(default: Essential).**\n\n6. To complete the Microsoft Sentinel integration, click **'Save'**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Entra app"", ""description"": ""An Entra app needs to be registered and provisioned with \u2018Monitoring Metrics Publisher\u2019 role and configured with either Certificate or Client Secret as credentials for secure data transfer. See [the Log ingestion tutorial to learn more about Entra App creation, registration and credential configuration.](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal) ""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence/Data%20Connectors/Template_Samsung.json","true"
For Certificate-based authentication, you can download the Samsung CA-signed certificate (public key) from [KAI documentation portal](https://docs.samsungknox.com/admin/knox-asset-intelligence/assets/samsung-knox-validation-certificate.crt). For Client Secret-based authentication, you can create the secret during the Entra application registration. Ensure you copy the Client Secret value as soon as it is generated.\n\n>**IMPORTANT:** Save the values for Tenant (Directory) ID and Client (Application) ID. If Client Secret-based authentication is enabled, save Client Secret (Secret Value) associated with the Entra app.""}, {""title"": ""STEP 2 - Automate deployment of this Data Connector using the below Azure Resource Manager (ARM) template"", ""description"": "">**IMPORTANT:** Before deploying the Data Connector, copy the below Workspace name associated with your Microsoft Sentinel (also your Log Analytics) instance."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""1. Click the button below to install Samsung Knox Intelligence Solution. \n\n\t[![DeployToAzure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SamsungDCDefinition-azuredeploy)\\n2. Provide the following required fields: Log Analytics Workspace Name, Log Analytics Workspace Location, Log Analytics Workspace Subscription (ID) and Log Analytics Workspace Resource Group.""}, {""title"": ""STEP 3 - Obtain Microsoft Sentinel Data Collection details"", ""description"": ""Once the ARM template is deployed, navigate to Data Collection Rules https://portal.azure.com/#browse/microsoft.insights%2Fdatacollectionrules? and save values associated with the Immutable ID (DCR) and Data Collection Endpoint (DCE). 
\n\n>**IMPORTANT:** To enable end-to-end integration, information related to Microsoft Sentinel DCE and DCR are required for configuration in Samsung Knox Asset Intelligence portal (STEP 4). \n\nEnsure the Entra Application created in STEP 1 has permissions to use the DCR created in order to send data to the DCE. Please refer to https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#assign-permissions-to-the-dcr to assign permissions accordingly.""}, {""title"": ""STEP 4 - Connect to Samsung Knox Asset Intelligence solution to configure Microsoft Sentinel to push select Knox Security Events as Alerts"", ""description"": ""1. Login to [Knox Asset Intelligence administration portal](https://central.samsungknox.com/kaiadmin/dai/home) and navigate to **Dashboard Settings**; this is available at the top-right corner of the Portal.\n> **Note**: Ensure the login user has access to 'Security' and 'Manage dashboard view and data collection' permissions.\n\n2. Click on Security tab to view settings for Microsoft Sentinel Integration and Knox Security Logs.\n\n3. In the Security Operations Integration page, toggle on **'Enable Microsoft Sentinel Integration'** and enter appropriate values in the required fields.\n\n >a. Based on the authentication method used, refer to information saved from STEP 1 while registering the Entra application. \n\n >b. For Microsoft Sentinel DCE and DCR, refer to the information saved from STEP 3. \n\n4. Click on **'Test Connection'** and ensure the connection is successful.\n\n5. Before you can Save, configure Knox Security Logs by selecting either Essential or Advanced configuration **(default: Essential).**\n\n6. 
To complete the Microsoft Sentinel integration, click **'Save'**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Entra app"", ""description"": ""An Entra app needs to be registered and provisioned with \u2018Monitoring Metrics Publisher\u2019 role and configured with either Certificate or Client Secret as credentials for secure data transfer. 
See [the Log ingestion tutorial to learn more about Entra App creation, registration and credential configuration.](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal) ""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence/Data%20Connectors/Template_Samsung.json","true" -"Samsung_Knox_Network_CL","Samsung Knox Asset Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence","samsungelectronics1632791654245","azure-sentinel-solution-samsung-knox-kai","2025-01-15","","","Samsung Electronics Co., Ltd.","Partner","https://www2.samsungknox.com/en/support","","domains","SamsungDCDefinition","Samsung","Samsung Knox Asset Intelligence","Samsung Knox Asset Intelligence Data Connector lets you centralize your mobile security events and logs in order to view customized insights using the Workbook template, and identify incidents based on Analytics Rules templates.","[{""title"": """", ""description"": ""This Data Connector uses the Microsoft Log Ingestion API to push security events into Microsoft Sentinel from Samsung Knox Asset Intelligence (KAI) solution.""}, {""title"": ""STEP 1 - Create and register an Entra Application"", ""description"": "">**Note**: This Data Connector can support either Certificate-based or Client Secret-based authentication. For Certificate-based authentication, you can download the Samsung CA-signed certificate (public key) from [KAI documentation portal](https://docs.samsungknox.com/admin/knox-asset-intelligence/assets/samsung-knox-validation-certificate.crt). For Client Secret-based authentication, you can create the secret during the Entra application registration. Ensure you copy the Client Secret value as soon as it is generated.\n\n>**IMPORTANT:** Save the values for Tenant (Directory) ID and Client (Application) ID. 
If Client Secret-based authentication is enabled, save Client Secret (Secret Value) associated with the Entra app.""}, {""title"": ""STEP 2 - Automate deployment of this Data Connector using the below Azure Resource Manager (ARM) template"", ""description"": "">**IMPORTANT:** Before deploying the Data Connector, copy the below Workspace name associated with your Microsoft Sentinel (also your Log Analytics) instance."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""1. Click the button below to install Samsung Knox Intelligence Solution. \n\n\t[![DeployToAzure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SamsungDCDefinition-azuredeploy)\\n2. Provide the following required fields: Log Analytics Workspace Name, Log Analytics Workspace Location, Log Analytics Workspace Subscription (ID) and Log Analytics Workspace Resource Group.""}, {""title"": ""STEP 3 - Obtain Microsoft Sentinel Data Collection details"", ""description"": ""Once the ARM template is deployed, navigate to Data Collection Rules https://portal.azure.com/#browse/microsoft.insights%2Fdatacollectionrules? and save values associated with the Immutable ID (DCR) and Data Collection Endpoint (DCE). \n\n>**IMPORTANT:** To enable end-to-end integration, information related to Microsoft Sentinel DCE and DCR are required for configuration in Samsung Knox Asset Intelligence portal (STEP 4). \n\nEnsure the Entra Application created in STEP 1 has permissions to use the DCR created in order to send data to the DCE. Please refer to https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#assign-permissions-to-the-dcr to assign permissions accordingly.""}, {""title"": ""STEP 4 - Connect to Samsung Knox Asset Intelligence solution to configure Microsoft Sentinel to push select Knox Security Events as Alerts"", ""description"": ""1. 
Login to [Knox Asset Intelligence administration portal](https://central.samsungknox.com/kaiadmin/dai/home) and navigate to **Dashboard Settings**; this is available at the top-right corner of the Portal.\n> **Note**: Ensure the login user has access to 'Security' and 'Manage dashboard view and data collection' permissions.\n\n2. Click on Security tab to view settings for Microsoft Sentinel Integration and Knox Security Logs.\n\n3. In the Security Operations Integration page, toggle on **'Enable Microsoft Sentinel Integration'** and enter appropriate values in the required fields.\n\n >a. Based on the authentication method used, refer to information saved from STEP 1 while registering the Entra application. \n\n >b. For Microsoft Sentinel DCE and DCR, refer to the information saved from STEP 3. \n\n4. Click on **'Test Connection'** and ensure the connection is successful.\n\n5. Before you can Save, configure Knox Security Logs by selecting either Essential or Advanced configuration **(default: Essential).**\n\n6. To complete the Microsoft Sentinel integration, click **'Save'**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Entra app"", ""description"": ""An Entra app needs to be registered and provisioned with \u2018Monitoring Metrics Publisher\u2019 role and configured with either Certificate or Client Secret as credentials for secure data transfer. See [the Log ingestion tutorial to learn more about Entra App creation, registration and credential configuration.](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal) ""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence/Data%20Connectors/Template_Samsung.json","true" -"Samsung_Knox_Process_CL","Samsung Knox Asset Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence","samsungelectronics1632791654245","azure-sentinel-solution-samsung-knox-kai","2025-01-15","","","Samsung Electronics Co., Ltd.","Partner","https://www2.samsungknox.com/en/support","","domains","SamsungDCDefinition","Samsung","Samsung Knox Asset Intelligence","Samsung Knox Asset Intelligence Data Connector lets you centralize your mobile security events and logs in order to view customized insights using the Workbook template, and identify incidents based on Analytics Rules templates.","[{""title"": """", ""description"": ""This Data Connector uses the Microsoft Log Ingestion API to push security events into Microsoft Sentinel from Samsung Knox Asset Intelligence (KAI) solution.""}, {""title"": ""STEP 1 - Create and register an Entra Application"", ""description"": "">**Note**: This Data Connector can support either Certificate-based or Client Secret-based authentication. 
For Certificate-based authentication, you can download the Samsung CA-signed certificate (public key) from [KAI documentation portal](https://docs.samsungknox.com/admin/knox-asset-intelligence/assets/samsung-knox-validation-certificate.crt). For Client Secret-based authentication, you can create the secret during the Entra application registration. Ensure you copy the Client Secret value as soon as it is generated.\n\n>**IMPORTANT:** Save the values for Tenant (Directory) ID and Client (Application) ID. If Client Secret-based authentication is enabled, save Client Secret (Secret Value) associated with the Entra app.""}, {""title"": ""STEP 2 - Automate deployment of this Data Connector using the below Azure Resource Manager (ARM) template"", ""description"": "">**IMPORTANT:** Before deploying the Data Connector, copy the below Workspace name associated with your Microsoft Sentinel (also your Log Analytics) instance."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""1. Click the button below to install Samsung Knox Intelligence Solution. \n\n\t[![DeployToAzure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SamsungDCDefinition-azuredeploy)\\n2. Provide the following required fields: Log Analytics Workspace Name, Log Analytics Workspace Location, Log Analytics Workspace Subscription (ID) and Log Analytics Workspace Resource Group.""}, {""title"": ""STEP 3 - Obtain Microsoft Sentinel Data Collection details"", ""description"": ""Once the ARM template is deployed, navigate to Data Collection Rules https://portal.azure.com/#browse/microsoft.insights%2Fdatacollectionrules? and save values associated with the Immutable ID (DCR) and Data Collection Endpoint (DCE). 
\n\n>**IMPORTANT:** To enable end-to-end integration, information related to Microsoft Sentinel DCE and DCR are required for configuration in Samsung Knox Asset Intelligence portal (STEP 4). \n\nEnsure the Entra Application created in STEP 1 has permissions to use the DCR created in order to send data to the DCE. Please refer to https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#assign-permissions-to-the-dcr to assign permissions accordingly.""}, {""title"": ""STEP 4 - Connect to Samsung Knox Asset Intelligence solution to configure Microsoft Sentinel to push select Knox Security Events as Alerts"", ""description"": ""1. Login to [Knox Asset Intelligence administration portal](https://central.samsungknox.com/kaiadmin/dai/home) and navigate to **Dashboard Settings**; this is available at the top-right corner of the Portal.\n> **Note**: Ensure the login user has access to 'Security' and 'Manage dashboard view and data collection' permissions.\n\n2. Click on Security tab to view settings for Microsoft Sentinel Integration and Knox Security Logs.\n\n3. In the Security Operations Integration page, toggle on **'Enable Microsoft Sentinel Integration'** and enter appropriate values in the required fields.\n\n >a. Based on the authentication method used, refer to information saved from STEP 1 while registering the Entra application. \n\n >b. For Microsoft Sentinel DCE and DCR, refer to the information saved from STEP 3. \n\n4. Click on **'Test Connection'** and ensure the connection is successful.\n\n5. Before you can Save, configure Knox Security Logs by selecting either Essential or Advanced configuration **(default: Essential).**\n\n6. 
To complete the Microsoft Sentinel integration, click **'Save'**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Entra app"", ""description"": ""An Entra app needs to be registered and provisioned with \u2018Monitoring Metrics Publisher\u2019 role and configured with either Certificate or Client Secret as credentials for secure data transfer. 
See [the Log ingestion tutorial to learn more about Entra App creation, registration and credential configuration.](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal) ""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence/Data%20Connectors/Template_Samsung.json","true" -"Samsung_Knox_System_CL","Samsung Knox Asset Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence","samsungelectronics1632791654245","azure-sentinel-solution-samsung-knox-kai","2025-01-15","","","Samsung Electronics Co., Ltd.","Partner","https://www2.samsungknox.com/en/support","","domains","SamsungDCDefinition","Samsung","Samsung Knox Asset Intelligence","Samsung Knox Asset Intelligence Data Connector lets you centralize your mobile security events and logs in order to view customized insights using the Workbook template, and identify incidents based on Analytics Rules templates.","[{""title"": """", ""description"": ""This Data Connector uses the Microsoft Log Ingestion API to push security events into Microsoft Sentinel from Samsung Knox Asset Intelligence (KAI) solution.""}, {""title"": ""STEP 1 - Create and register an Entra Application"", ""description"": "">**Note**: This Data Connector can support either Certificate-based or Client Secret-based authentication. For Certificate-based authentication, you can download the Samsung CA-signed certificate (public key) from [KAI documentation portal](https://docs.samsungknox.com/admin/knox-asset-intelligence/assets/samsung-knox-validation-certificate.crt). For Client Secret-based authentication, you can create the secret during the Entra application registration. Ensure you copy the Client Secret value as soon as it is generated.\n\n>**IMPORTANT:** Save the values for Tenant (Directory) ID and Client (Application) ID. 
If Client Secret-based authentication is enabled, save Client Secret (Secret Value) associated with the Entra app.""}, {""title"": ""STEP 2 - Automate deployment of this Data Connector using the below Azure Resource Manager (ARM) template"", ""description"": "">**IMPORTANT:** Before deploying the Data Connector, copy the below Workspace name associated with your Microsoft Sentinel (also your Log Analytics) instance."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""1. Click the button below to install Samsung Knox Intelligence Solution. \n\n\t[![DeployToAzure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SamsungDCDefinition-azuredeploy)\\n2. Provide the following required fields: Log Analytics Workspace Name, Log Analytics Workspace Location, Log Analytics Workspace Subscription (ID) and Log Analytics Workspace Resource Group.""}, {""title"": ""STEP 3 - Obtain Microsoft Sentinel Data Collection details"", ""description"": ""Once the ARM template is deployed, navigate to Data Collection Rules https://portal.azure.com/#browse/microsoft.insights%2Fdatacollectionrules? and save values associated with the Immutable ID (DCR) and Data Collection Endpoint (DCE). \n\n>**IMPORTANT:** To enable end-to-end integration, information related to Microsoft Sentinel DCE and DCR are required for configuration in Samsung Knox Asset Intelligence portal (STEP 4). \n\nEnsure the Entra Application created in STEP 1 has permissions to use the DCR created in order to send data to the DCE. Please refer to https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#assign-permissions-to-the-dcr to assign permissions accordingly.""}, {""title"": ""STEP 4 - Connect to Samsung Knox Asset Intelligence solution to configure Microsoft Sentinel to push select Knox Security Events as Alerts"", ""description"": ""1. 
Login to [Knox Asset Intelligence administration portal](https://central.samsungknox.com/kaiadmin/dai/home) and navigate to **Dashboard Settings**; this is available at the top-right corner of the Portal.\n> **Note**: Ensure the login user has access to 'Security' and 'Manage dashboard view and data collection' permissions.\n\n2. Click on Security tab to view settings for Microsoft Sentinel Integration and Knox Security Logs.\n\n3. In the Security Operations Integration page, toggle on **'Enable Microsoft Sentinel Integration'** and enter appropriate values in the required fields.\n\n >a. Based on the authentication method used, refer to information saved from STEP 1 while registering the Entra application. \n\n >b. For Microsoft Sentinel DCE and DCR, refer to the information saved from STEP 3. \n\n4. Click on **'Test Connection'** and ensure the connection is successful.\n\n5. Before you can Save, configure Knox Security Logs by selecting either Essential or Advanced configuration **(default: Essential).**\n\n6. To complete the Microsoft Sentinel integration, click **'Save'**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Entra app"", ""description"": ""An Entra app needs to be registered and provisioned with \u2018Monitoring Metrics Publisher\u2019 role and configured with either Certificate or Client Secret as credentials for secure data transfer. See [the Log ingestion tutorial to learn more about Entra App creation, registration and credential configuration.](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal) ""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence/Data%20Connectors/Template_Samsung.json","true" -"Samsung_Knox_User_CL","Samsung Knox Asset Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence","samsungelectronics1632791654245","azure-sentinel-solution-samsung-knox-kai","2025-01-15","","","Samsung Electronics Co., Ltd.","Partner","https://www2.samsungknox.com/en/support","","domains","SamsungDCDefinition","Samsung","Samsung Knox Asset Intelligence","Samsung Knox Asset Intelligence Data Connector lets you centralize your mobile security events and logs in order to view customized insights using the Workbook template, and identify incidents based on Analytics Rules templates.","[{""title"": """", ""description"": ""This Data Connector uses the Microsoft Log Ingestion API to push security events into Microsoft Sentinel from Samsung Knox Asset Intelligence (KAI) solution.""}, {""title"": ""STEP 1 - Create and register an Entra Application"", ""description"": "">**Note**: This Data Connector can support either Certificate-based or Client Secret-based authentication. 
For Certificate-based authentication, you can download the Samsung CA-signed certificate (public key) from [KAI documentation portal](https://docs.samsungknox.com/admin/knox-asset-intelligence/assets/samsung-knox-validation-certificate.crt). For Client Secret-based authentication, you can create the secret during the Entra application registration. Ensure you copy the Client Secret value as soon as it is generated.\n\n>**IMPORTANT:** Save the values for Tenant (Directory) ID and Client (Application) ID. If Client Secret-based authentication is enabled, save Client Secret (Secret Value) associated with the Entra app.""}, {""title"": ""STEP 2 - Automate deployment of this Data Connector using the below Azure Resource Manager (ARM) template"", ""description"": "">**IMPORTANT:** Before deploying the Data Connector, copy the below Workspace name associated with your Microsoft Sentinel (also your Log Analytics) instance."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""1. Click the button below to install Samsung Knox Intelligence Solution. \n\n\t[![DeployToAzure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SamsungDCDefinition-azuredeploy)\\n2. Provide the following required fields: Log Analytics Workspace Name, Log Analytics Workspace Location, Log Analytics Workspace Subscription (ID) and Log Analytics Workspace Resource Group.""}, {""title"": ""STEP 3 - Obtain Microsoft Sentinel Data Collection details"", ""description"": ""Once the ARM template is deployed, navigate to Data Collection Rules https://portal.azure.com/#browse/microsoft.insights%2Fdatacollectionrules? and save values associated with the Immutable ID (DCR) and Data Collection Endpoint (DCE). 
\n\n>**IMPORTANT:** To enable end-to-end integration, information related to Microsoft Sentinel DCE and DCR are required for configuration in Samsung Knox Asset Intelligence portal (STEP 4). \n\nEnsure the Entra Application created in STEP 1 has permissions to use the DCR created in order to send data to the DCE. Please refer to https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#assign-permissions-to-the-dcr to assign permissions accordingly.""}, {""title"": ""STEP 4 - Connect to Samsung Knox Asset Intelligence solution to configure Microsoft Sentinel to push select Knox Security Events as Alerts"", ""description"": ""1. Login to [Knox Asset Intelligence administration portal](https://central.samsungknox.com/kaiadmin/dai/home) and navigate to **Dashboard Settings**; this is available at the top-right corner of the Portal.\n> **Note**: Ensure the login user has access to 'Security' and 'Manage dashboard view and data collection' permissions.\n\n2. Click on Security tab to view settings for Microsoft Sentinel Integration and Knox Security Logs.\n\n3. In the Security Operations Integration page, toggle on **'Enable Microsoft Sentinel Integration'** and enter appropriate values in the required fields.\n\n >a. Based on the authentication method used, refer to information saved from STEP 1 while registering the Entra application. \n\n >b. For Microsoft Sentinel DCE and DCR, refer to the information saved from STEP 3. \n\n4. Click on **'Test Connection'** and ensure the connection is successful.\n\n5. Before you can Save, configure Knox Security Logs by selecting either Essential or Advanced configuration **(default: Essential).**\n\n6. 
To complete the Microsoft Sentinel integration, click **'Save'**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Entra app"", ""description"": ""An Entra app needs to be registered and provisioned with \u2018Monitoring Metrics Publisher\u2019 role and configured with either Certificate or Client Secret as credentials for secure data transfer. 
See [the Log ingestion tutorial to learn more about Entra App creation, registration and credential configuration.](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal) ""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence/Data%20Connectors/Template_Samsung.json","true" -"","SecurityBridge App","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityBridge%20App","securitybridge1647511278080","securitybridge-sentinel-app-1","2022-02-17","","","SecurityBridge","Partner","https://securitybridge.com/contact/","","domains,verticals","","","","","","","false","","false" -"ABAPAuditLog","SecurityBridge App","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityBridge%20App","securitybridge1647511278080","securitybridge-sentinel-app-1","2022-02-17","","","SecurityBridge","Partner","https://securitybridge.com/contact/","","domains,verticals","SecurityBridge","SecurityBridge Group GmbH","SecurityBridge Solution for SAP","SecurityBridge enhances SAP security by integrating seamlessly with Microsoft Sentinel, enabling real-time monitoring and threat detection across SAP environments. This integration allows Security Operations Centers (SOCs) to consolidate SAP security events with other organizational data, providing a unified view of the threat landscape . Leveraging AI-powered analytics and Microsoft’s Security Copilot, SecurityBridge identifies sophisticated attack patterns and vulnerabilities within SAP applications, including ABAP code scanning and configuration assessments . The solution supports scalable deployments across complex SAP landscapes, whether on-premises, in the cloud, or hybrid environments . By bridging the gap between IT and SAP security teams, SecurityBridge empowers organizations to proactively detect, investigate, and respond to threats, enhancing overall security posture.","[{""title"": ""1. 
Create ARM Resources and Provide the Required Permissions"", ""description"": ""We will create data collection rule (DCR) and data collection endpoint (DCE) resources. We will also create a Microsoft Entra app registration and assign the required permissions to it."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated deployment of Azure resources\nClicking on \""Deploy push connector resources\"" will trigger the creation of DCR and DCE resources.\nIt will then create a Microsoft Entra app registration with client secret and grant permissions on the DCR. This setup enables data to be sent securely to the DCR using a OAuth v2 client credentials.""}}, {""parameters"": {""label"": ""Deploy push connector resources"", ""applicationDisplayName"": ""SecurityBridge Solution for SAP""}, ""type"": ""DeployPushConnectorButton_test""}]}, {""title"": ""2. Maintain the data collection endpoint details and authentication info in SecurityBridge"", ""description"": ""Share the data collection endpoint URL and authentication info with the SecurityBridge administrator to configure the Securitybridge to send data to the data collection endpoint.\n\nLearn more from our KB Page https://abap-experts.atlassian.net/wiki/spaces/SB/pages/4099309579/REST+Push+Interface"", ""instructions"": [{""parameters"": {""label"": ""Use this value to configure as Tenant ID in the LogIngestionAPI credential."", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra Application Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the Application Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Use this value to configure the LogsIngestionURL parameter when deploying the 
IFlow."", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the DCE URI""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""DCR Immutable ID"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the DCR ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Sentinel for SAP Stream ID"", ""value"": ""SAP_ABAPAUDITLOG""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""SecurityBridge_CL Stream ID"", ""value"": ""Custom-SecurityBridge_CL""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rules. 
Typically requires Azure RBAC Owner or User Access Administrator role.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityBridge%20App/Data%20Connectors/SecurityBridge_PUSH_CCP/SecurityBridge_connectorDefinition.json","true" -"SecurityBridgeLogs_CL","SecurityBridge App","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityBridge%20App","securitybridge1647511278080","securitybridge-sentinel-app-1","2022-02-17","","","SecurityBridge","Partner","https://securitybridge.com/contact/","","domains,verticals","SecurityBridgeSAP","SecurityBridge","SecurityBridge Threat Detection for SAP","SecurityBridge is the first and only holistic, natively integrated security platform, addressing all aspects needed to protect organizations running SAP from internal and external threats against their core business applications. The SecurityBridge platform is an SAP-certified add-on, used by organizations around the globe, and addresses the clients’ need for advanced cybersecurity, real-time monitoring, compliance, code security, and patching to protect against internal and external threats.This Microsoft Sentinel Solution allows you to integrate SecurityBridge Threat Detection events from all your on-premise and cloud based SAP instances into your security monitoring.Use this Microsoft Sentinel Solution to receive normalized and speaking security events, pre-built dashboards and out-of-the-box templates for your SAP security monitoring.","[{""title"": """", ""description"": ""*NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias SecurityBridgeLogs and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityBridge%20App/Parsers/SecurityBridgeLogs.txt).The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using SecurityBridge Application Platform 7.4.0."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux or Windows"", ""description"": ""This solution requires logs collection via an Microsoft Sentinel agent installation\n\n> The Microsoft Sentinel agent is supported on the following Operating Systems: \n1. Windows Servers \n2. SUSE Linux Enterprise Server\n3. Redhat Linux Enterprise Server\n4. Oracle Linux Enterprise Server\n5. If you have the SAP solution installed on HPUX / AIX then you will need to deploy a log collector on one of the Linux options listed above and forward your logs to that collector\n\n"", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": 
""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the custom log directory to be collected"", ""instructions"": [{""parameters"": {""linkType"": ""OpenCustomLogsSettings""}, ""type"": ""InstallAgent""}]}, {""description"": ""1. Select the link above to open your workspace advanced settings \n2. Click **+Add custom**\n3. Click **Browse** to upload a sample of a SecurityBridge SAP log file (e.g. AED_20211129164544.cef). Then, click **Next >**\n4. Select **New Line** as the record delimiter then click **Next >**\n5. Select **Windows** or **Linux** and enter the path to SecurityBridge logs based on your configuration. Example:\n - '/usr/sap/tmp/sb_events/*.cef' \n\n>**NOTE:** You can add as many paths as you want in the configuration.\n\n6. After entering the path, click the '+' symbol to apply, then click **Next >** \n7. Add **SecurityBridgeLogs** as the custom log Name and click **Done**""}, {""title"": ""3. 
Check logs in Microsoft Sentinel"", ""description"": ""Open Log Analytics to check if the logs are received using the SecurityBridgeLogs_CL Custom log table.\n\n>**NOTE:** It may take up to 30 minutes before new logs will appear in SecurityBridgeLogs_CL table."", ""instructions"": []}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityBridge%20App/Data%20Connectors/Connector_SecurityBridge.json","true" -"","SecurityScorecard Cybersecurity Ratings","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityScorecard%20Cybersecurity%20Ratings","SecurityScorecard","SecurityScorecard","2022-10-01","2022-10-01","","SecurityScorecard","Partner","https://support.securityscorecard.com/hc/en-us/requests/new","","domains","","","","","","","false","","false" -"SecurityScorecardFactor_CL","SecurityScorecard Cybersecurity Ratings","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityScorecard%20Cybersecurity%20Ratings","SecurityScorecard","SecurityScorecard","2022-10-01","2022-10-01","","SecurityScorecard","Partner","https://support.securityscorecard.com/hc/en-us/requests/new","","domains","SecurityScorecardFactorAzureFunctions","SecurityScorecard","SecurityScorecard 
Factor","SecurityScorecard is the leader in cybersecurity risk ratings. The [SecurityScorecard](https://www.SecurityScorecard.com/) Factors data connector provides the ability for Sentinel to import SecurityScorecard factor ratings as logs. SecurityScorecard provides ratings for over 12 million companies and domains using countless data points from across the internet. Maintain full awareness of any company's security posture and be able to receive timely updates when factor scores change or drop. SecurityScorecard factor ratings are updated daily based on evidence collected across the web.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the SecurityScorecard API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the SecurityScorecard API**\n\n Follow these instructions to create/get a SecurityScorecard API token.\n 1. As an administrator in SecurityScorecard, navigate to My Settings and then Users\n 2. Click '+ Add User'\n 3. In the form, check off 'Check to create a bot user'\n 4. Provide a name for the Bot and provide it with Read Only permission\n 5. Click 'Add User'\n 6. Locate the newly created Bot user\n 7. Click 'create token' in the Bot user's row\n 8. 
Click 'Confirm' and note the API token that has been generated""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the SecurityScorecard Factor data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the SecurityScorecard API Authorization Key(s)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the SecurityScorecard Factor connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SecurityScorecardFactorAPI-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tSecurityScorecard API Key \n\t\tSecurityScorecard Base URL (https://api.securityscorecard.io) \n\t\tDomain \n\t\tPortfolio IDs (Coma separated IDs) \n\t\tSecurityScorecard Factor Table Name (Default: SecurityScorecardFactor) \n\t\tLevel Factor Change (Default: 7) \n\t\tFactor Schedule (Default: 0 15 * * * *) \n\t\tDiff Override Own Factor (Default: true) \n\t\tDiff Override Portfolio Factor (Default: true) \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the SecurityScorecard Factor data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-SecurityScorecardFactorAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. SecurityScorecardXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tSecurityScorecard API Key \n\t\tSecurityScorecard Base URL (https://api.securityscorecard.io) \n\t\tDomain \n\t\tPortfolio IDs (Coma separated IDs) \n\t\tSecurityScorecard Factor Table Name (Default: SecurityScorecardFactor) \n\t\tLevel Factor Change (Default: 7) \n\t\tFactor Schedule (Default: 0 15 * * * *) \n\t\tDiff Override Own Factor (Default: true) \n\t\tDiff Override Portfolio Factor (Default: true) \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**SecurityScorecard API Key** is required.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityScorecard%20Cybersecurity%20Ratings/Data%20Connectors/SecurityScorecardFactor/SecurityScorecardFactor_API_FunctionApp.json","true" -"SecurityScorecardIssues_CL","SecurityScorecard Cybersecurity Ratings","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityScorecard%20Cybersecurity%20Ratings","SecurityScorecard","SecurityScorecard","2022-10-01","2022-10-01","","SecurityScorecard","Partner","https://support.securityscorecard.com/hc/en-us/requests/new","","domains","SecurityScorecardIssueAzureFunctions","SecurityScorecard","SecurityScorecard Issue","SecurityScorecard is the leader in cybersecurity risk ratings. The [SecurityScorecard](https://www.SecurityScorecard.com/) Issues data connector provides the ability for Sentinel to import SecurityScorecard issue data as logs. SecurityScorecard provides ratings for over 12 million companies and domains using countless data points from across the internet. Maintain full awareness of any company's security posture and be able to receive timely updates when new cybersecurity issues are discovered.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the SecurityScorecard API to pull its logs into Microsoft Sentinel. 
This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the SecurityScorecard API**\n\n Follow these instructions to create/get a SecurityScorecard API token.\n 1. As an administrator in SecurityScorecard, navigate to My Settings and then Users\n 2. Click '+ Add User'\n 3. In the form, check off 'Check to create a bot user'\n 4. Provide a name for the Bot and provide it with Read Only permission\n 5. Click 'Add User'\n 6. Locate the newly created Bot user\n 7. Click 'create token' in the Bot user's row\n 8. Click 'Confirm' and note the API token that has been generated""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the SecurityScorecard Issue data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the SecurityScorecard API Authorization Key(s)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the SecurityScorecard Issue connector.\n\n1. 
Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SecurityScorecardIssueAPI-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tSecurityScorecard API Key \n\t\tSecurityScorecard Base URL (https://api.securityscorecard.io) \n\t\tDomain \n\t\tPortfolio IDs (Coma separated IDs) \n\t\tSecurityScorecard Issue Table Name (Default: SecurityScorecardIssue) \n\t\tLevel Issue Change (Default: 7) \n\t\tIssue Schedule (Default: 0 0,30 * * * *) \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the SecurityScorecard Issue data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-SecurityScorecardIssueAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. 
**Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. SecurityScorecardXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tSecurityScorecard API Key \n\t\tSecurityScorecard Base URL (https://api.securityscorecard.io) \n\t\tDomain \n\t\tPortfolio IDs (Coma separated IDs) \n\t\tSecurityScorecard Issue Table Name (Default: SecurityScorecardIssue) \n\t\tLevel Issue Change (Default: 7) \n\t\tIssue Schedule (Default: 0 0,30 * * * *) \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**SecurityScorecard API Key** is required.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityScorecard%20Cybersecurity%20Ratings/Data%20Connectors/SecurityScorecardIssue/SecurityScorecardIssue_API_FunctionApp.json","true" -"SecurityScorecardRatings_CL","SecurityScorecard Cybersecurity Ratings","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityScorecard%20Cybersecurity%20Ratings","SecurityScorecard","SecurityScorecard","2022-10-01","2022-10-01","","SecurityScorecard","Partner","https://support.securityscorecard.com/hc/en-us/requests/new","","domains","SecurityScorecardRatingsAzureFunctions","SecurityScorecard","SecurityScorecard Cybersecurity Ratings","SecurityScorecard is the leader in cybersecurity risk ratings. 
The [SecurityScorecard](https://www.SecurityScorecard.com/) data connector provides the ability for Sentinel to import SecurityScorecard ratings as logs. SecurityScorecard provides ratings for over 12 million companies and domains using countless data points from across the internet. Maintain full awareness of any company's security posture and be able to receive timely updates when scores change or drop. SecurityScorecard ratings are updated daily based on evidence collected across the web.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the SecurityScorecard API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the SecurityScorecard API**\n\n Follow these instructions to create/get a SecurityScorecard API token.\n 1. As an administrator in SecurityScorecard, navigate to My Settings and then Users\n 2. Click '+ Add User'\n 3. In the form, check off 'Check to create a bot user'\n 4. Provide a name for the Bot and provide it with Read Only permission\n 5. Click 'Add User'\n 6. Locate the newly created Bot user\n 7. Click 'create token' in the Bot user's row\n 8. 
Click 'Confirm' and note the API token that has been generated""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the SecurityScorecard Ratings data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the SecurityScorecard API Authorization Key(s)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the SecurityScorecard Ratings connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SecurityScorecardRatingsAPI-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tSecurityScorecard API Key \n\t\tSecurityScorecard Base URL (https://api.securityscorecard.io) \n\t\tDomain \n\t\tPortfolio IDs (Coma separated IDs) \n\t\tSecurityScorecard Ratings Table Name (Default: SecurityScorecardRatings) \n\t\tLevel Ratings Change (Default: 7) \n\t\tRatings Schedule (Default: 0 45 * * * *) \n\t\tDiff Override Own Ratings (Default: true) \n\t\tDiff Override Portfolio Ratings (Default: true) \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the SecurityScorecard Ratings data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-SecurityScorecardRatingsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. SecurityScorecardXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tSecurityScorecard API Key \n\t\tSecurityScorecard Base URL (https://api.securityscorecard.io) \n\t\tDomain \n\t\tPortfolio IDs (Coma separated IDs) \n\t\tSecurityScorecard Ratings Table Name (Default: SecurityScorecardRatings) \n\t\tLevel Ratings Change (Default: 7) \n\t\tRatings Schedule (Default: 0 45 * * * *) \n\t\tDiff Override Own Ratings (Default: true) \n\t\tDiff Override Portfolio Ratings (Default: true) \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**SecurityScorecard API Key** is required.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityScorecard%20Cybersecurity%20Ratings/Data%20Connectors/SecurityScorecardRatings/SecurityScorecardRatings_API_FunctionApp.json","true" -"","SecurityThreatEssentialSolution","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityThreatEssentialSolution","azuresentinel","azure-sentinel-solution-securitythreatessentialsol","2022-03-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","Semperis Directory Services Protector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Semperis%20Directory%20Services%20Protector","semperis","directory-services-protector-solution","2021-10-18","","","Semperis","Partner","https://www.semperis.com/contact-us/","","domains","","","","","","","false","","false" -"SecurityEvent","Semperis Directory Services Protector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Semperis%20Directory%20Services%20Protector","semperis","directory-services-protector-solution","2021-10-18","","","Semperis","Partner","https://www.semperis.com/contact-us/","","domains","SemperisDSP","SEMPERIS","Semperis Directory Services Protector","Semperis Directory Services Protector data connector allows for the 
export of its Windows event logs (i.e. Indicators of Exposure and Indicators of Compromise) to Microsoft Sentinel in real time.
It provides a data parser to manipulate the Windows event logs more easily. The different workbooks ease your Active Directory security monitoring and provide different ways to visualize the data. The analytic templates allow to automate responses regarding different events, exposures, or attacks.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**dsp_parser**](https://aka.ms/sentinel-SemperisDSP-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""**Configure Windows Security Events via AMA connector**"", ""description"": ""Collect Windows security events logs from your **Semperis DSP Management Server** .""}, {""title"": ""1. Install the Azure Monitor Agent (AMA)"", ""description"": ""On your **Semperis DSP Management Server** install the AMA on the DSP machine that will act as the event log forwarder.\nYou can skip this step if you have already installed the Microsoft agent for Windows""}, {""title"": ""2. Create a Data Collection Rule (DCR)"", ""description"": ""Start collecting logs from the **Semperis DSP Management Server** .\n\n1. In the Azure portal, navigate to your **Log Analytics workspace**.\n2. In the left pane, click on **Configuration** and then **Data connectors**.\n3. Find and install the **the Windows Security Events via AMA** connector.\n4. Click on **Open connector** and then on **Create data collection rule**.\n5. 
Configure the DCR with the necessary details, such as the log sources and the destination workspace."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Semperis DSP Management Server"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""**Configure Common Event Format via AMA connector**"", ""description"": ""Collect syslog messages send from your **Semperis DSP Management Server** .""}, {""title"": ""1. Install the Azure Monitor Agent (AMA)"", ""description"": ""Install the AMA on the Linux machine that will act as the log forwarder. This machine will collect and forward CEF logs to Microsoft Sentinel.\nYou can skip this step if you have already installed the Microsoft agent for Linux""}, {""title"": ""2. Create a Data Collection Rule (DCR)"", ""description"": ""Start collecting logs from the **Semperis DSP Management Server** .\n\n1. In the Azure portal, navigate to your **Log Analytics workspace**.\n2. In the left pane, click on **Configuration** and then **Data connectors**.\n3. Find and install the **the Common Event Format via AMA** connector.\n4. Click on **Open connector** and then on **Create data collection rule**.\n5. Configure the DCR with the necessary details, such as the log sources and the destination workspace."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Semperis DSP Management Server"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""3. 
Configure sending CEF logs on your Semperis DSP Management Server"", ""description"": ""Configure your **Semperis DSP Management Server** to send CEF logs to the Linux machine where the AMA is installed. This involves setting the destination IP address and port for the CEF logs""}, {""title"": """", ""description"": ""> You should now be able to receive logs in the *Windows event log* table and *common log* table, log data can be parsed using the **dsp_parser()** function, used by all query samples, workbooks and analytic templates.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Semperis%20Directory%20Services%20Protector/Data%20Connectors/SemperisDSP-connector.json","true" -"","SenservaPro","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SenservaPro","senservallc","senservapro4sentinel","2022-06-01","","","Senserva","Partner","https://www.senserva.com/contact/","","domains","","","","","","","false","","false" -"SenservaPro_CL","SenservaPro","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SenservaPro","senservallc","senservapro4sentinel","2022-06-01","","","Senserva","Partner","https://www.senserva.com/contact/","","domains","SenservaPro","Senserva","SenservaPro (Preview)","The SenservaPro data connector provides a viewing experience for your SenservaPro scanning logs. View dashboards of your data, use queries to hunt & explore, and create custom alerts.","[{""title"": ""1. Setup the data connection"", ""description"": ""Visit [Senserva Setup](https://www.senserva.com/senserva-microsoft-sentinel-edition-setup/) for information on setting up the Senserva data connection, support, or any other questions. The Senserva installation will configure a Log Analytics Workspace for output. 
Deploy Microsoft Sentinel onto the configured Log Analytics Workspace to finish the data connection setup by following [this onboarding guide.](https://docs.microsoft.com/azure/sentinel/quickstart-onboard)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SenservaPro/Data%20Connectors/SenservaPro.json","true" -"","SentinelOne","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne","azuresentinel","azure-sentinel-solution-sentinelone","2024-11-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"SentinelOne_CL","SentinelOne","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne","azuresentinel","azure-sentinel-solution-sentinelone","2024-11-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SentinelOne","SentinelOne","SentinelOne","The [SentinelOne](https://www.sentinelone.com/) data connector provides the capability to 
ingest common SentinelOne server objects such as Threats, Agents, Applications, Activities, Policies, Groups, and more events into Microsoft Sentinel through the REST API. Refer to API documentation: `https://.sentinelone.net/api-doc/overview` for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the SentinelOne API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias SentinelOne and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne/Parsers/SentinelOne.txt). The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the SentinelOne API**\n\n Follow the instructions to obtain the credentials.\n\n1. Log in to the SentinelOne Management Console with Admin user credentials.\n2. In the Management Console, click **Settings**.\n3. 
In the **SETTINGS** view, click **USERS**\n4. Click **New User**.\n5. Enter the information for the new console user.\n5. In Role, select **Admin**.\n6. Click **SAVE**\n7. Save credentials of the new user for using in the data connector.""}, {""title"": """", ""description"": ""**NOTE :-** Admin access can be delegated using custom roles. Please review SentinelOne [documentation](https://www.sentinelone.com/blog/feature-spotlight-fully-custom-role-based-access-control/) to learn more about custom RBAC.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the SentinelOne data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the SentinelOne Audit data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SentinelOneAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-SentinelOneAPI-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **SentinelOneAPIToken**, **SentinelOneUrl** `(https://.sentinelone.net)` and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. 
\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the SentinelOne Reports data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-SentinelOneAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. SOneXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n 1. In the Function App, select the Function App Name and select **Configuration**.\n\n 2. In the **Application settings** tab, select ** New application setting**.\n\n 3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\t SentinelOneAPIToken\n\t\t SentinelOneUrl\n\t\t WorkspaceID\n\t\t WorkspaceKey\n\t\t logAnalyticsUri (optional)\n\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n\n 4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**SentinelOneAPIToken** is required. See the documentation to learn more about API on the `https://.sentinelone.net/api-doc/overview`.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne/Data%20Connectors/SentinelOne_API_FunctionApp.json","true" -"SentinelOneActivities_CL","SentinelOne","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne","azuresentinel","azure-sentinel-solution-sentinelone","2024-11-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SentinelOneCCP","Microsoft","SentinelOne","The [SentinelOne](https://usea1-nessat.sentinelone.net/api-doc/overview) data connector allows ingesting logs from the SentinelOne API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. It uses the SentinelOne API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the SentinelOne API \n Follow the instructions to obtain the credentials. You can also follow the [guide](https://usea1-nessat.sentinelone.net/docs/en/how-to-automate-api-token-generation.html#how-to-automate-api-token-generation) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve SentinelOne Management URL\n 1.1. Log in to the SentinelOne [**Management Console**] with Admin user credentials\n 1.2. 
In the [**Management Console**] copy the URL link above without the URL path.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the SentinelOne [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**]\n 2.3. In [**Settings**] view click on [**USERS**].\n 2.4. In the [**USERS**] Page click on [**Service Users**] -> [**Actions**] -> [**Create new service user**].\n 2.5. Choose [**Expiration date**] and [**scope**] (by site) and click on [**Create User**].\n 2.6. Once the [**Service User**] is created copy the [**API Token**] from page and press [**Save**]""}}, {""parameters"": {""label"": ""SentinelOne Management URL"", ""placeholder"": ""https://example.sentinelone.net/"", ""type"": ""text"", ""name"": ""managementUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""securestring"", ""name"": ""apitoken""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne/Data%20Connectors/SentinelOne_ccp/connectorDefinition.json","true" -"SentinelOneAgents_CL","SentinelOne","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne","azuresentinel","azure-sentinel-solution-sentinelone","2024-11-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SentinelOneCCP","Microsoft","SentinelOne","The 
[SentinelOne](https://usea1-nessat.sentinelone.net/api-doc/overview) data connector allows ingesting logs from the SentinelOne API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. It uses the SentinelOne API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the SentinelOne API \n Follow the instructions to obtain the credentials. You can also follow the [guide](https://usea1-nessat.sentinelone.net/docs/en/how-to-automate-api-token-generation.html#how-to-automate-api-token-generation) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve SentinelOne Management URL\n 1.1. Log in to the SentinelOne [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**] copy the URL link above without the URL path.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the SentinelOne [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**]\n 2.3. In [**Settings**] view click on [**USERS**].\n 2.4. In the [**USERS**] Page click on [**Service Users**] -> [**Actions**] -> [**Create new service user**].\n 2.5. Choose [**Expiration date**] and [**scope**] (by site) and click on [**Create User**].\n 2.6. 
Once the [**Service User**] is created copy the [**API Token**] from page and press [**Save**]""}}, {""parameters"": {""label"": ""SentinelOne Management URL"", ""placeholder"": ""https://example.sentinelone.net/"", ""type"": ""text"", ""name"": ""managementUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""securestring"", ""name"": ""apitoken""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne/Data%20Connectors/SentinelOne_ccp/connectorDefinition.json","true" -"SentinelOneAlerts_CL","SentinelOne","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne","azuresentinel","azure-sentinel-solution-sentinelone","2024-11-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SentinelOneCCP","Microsoft","SentinelOne","The [SentinelOne](https://usea1-nessat.sentinelone.net/api-doc/overview) data connector allows ingesting logs from the SentinelOne API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the SentinelOne API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the SentinelOne API \n Follow the instructions to obtain the credentials. You can also follow the [guide](https://usea1-nessat.sentinelone.net/docs/en/how-to-automate-api-token-generation.html#how-to-automate-api-token-generation) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve SentinelOne Management URL\n 1.1. Log in to the SentinelOne [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**] copy the URL link above without the URL path.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the SentinelOne [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**]\n 2.3. In [**Settings**] view click on [**USERS**].\n 2.4. In the [**USERS**] Page click on [**Service Users**] -> [**Actions**] -> [**Create new service user**].\n 2.5. Choose [**Expiration date**] and [**scope**] (by site) and click on [**Create User**].\n 2.6. 
Once the [**Service User**] is created copy the [**API Token**] from page and press [**Save**]""}}, {""parameters"": {""label"": ""SentinelOne Management URL"", ""placeholder"": ""https://example.sentinelone.net/"", ""type"": ""text"", ""name"": ""managementUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""securestring"", ""name"": ""apitoken""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne/Data%20Connectors/SentinelOne_ccp/connectorDefinition.json","true" -"SentinelOneGroups_CL","SentinelOne","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne","azuresentinel","azure-sentinel-solution-sentinelone","2024-11-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SentinelOneCCP","Microsoft","SentinelOne","The [SentinelOne](https://usea1-nessat.sentinelone.net/api-doc/overview) data connector allows ingesting logs from the SentinelOne API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the SentinelOne API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the SentinelOne API \n Follow the instructions to obtain the credentials. You can also follow the [guide](https://usea1-nessat.sentinelone.net/docs/en/how-to-automate-api-token-generation.html#how-to-automate-api-token-generation) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve SentinelOne Management URL\n 1.1. Log in to the SentinelOne [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**] copy the URL link above without the URL path.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the SentinelOne [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**]\n 2.3. In [**Settings**] view click on [**USERS**].\n 2.4. In the [**USERS**] Page click on [**Service Users**] -> [**Actions**] -> [**Create new service user**].\n 2.5. Choose [**Expiration date**] and [**scope**] (by site) and click on [**Create User**].\n 2.6. 
Once the [**Service User**] is created copy the [**API Token**] from page and press [**Save**]""}}, {""parameters"": {""label"": ""SentinelOne Management URL"", ""placeholder"": ""https://example.sentinelone.net/"", ""type"": ""text"", ""name"": ""managementUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""securestring"", ""name"": ""apitoken""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne/Data%20Connectors/SentinelOne_ccp/connectorDefinition.json","true" -"SentinelOneThreats_CL","SentinelOne","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne","azuresentinel","azure-sentinel-solution-sentinelone","2024-11-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SentinelOneCCP","Microsoft","SentinelOne","The [SentinelOne](https://usea1-nessat.sentinelone.net/api-doc/overview) data connector allows ingesting logs from the SentinelOne API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the SentinelOne API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the SentinelOne API \n Follow the instructions to obtain the credentials. You can also follow the [guide](https://usea1-nessat.sentinelone.net/docs/en/how-to-automate-api-token-generation.html#how-to-automate-api-token-generation) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve SentinelOne Management URL\n 1.1. Log in to the SentinelOne [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**] copy the URL link above without the URL path.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the SentinelOne [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**]\n 2.3. In [**Settings**] view click on [**USERS**].\n 2.4. In the [**USERS**] Page click on [**Service Users**] -> [**Actions**] -> [**Create new service user**].\n 2.5. Choose [**Expiration date**] and [**scope**] (by site) and click on [**Create User**].\n 2.6. 
Once the [**Service User**] is created copy the [**API Token**] from page and press [**Save**]""}}, {""parameters"": {""label"": ""SentinelOne Management URL"", ""placeholder"": ""https://example.sentinelone.net/"", ""type"": ""text"", ""name"": ""managementUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""securestring"", ""name"": ""apitoken""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne/Data%20Connectors/SentinelOne_ccp/connectorDefinition.json","true" -"","SentinelSOARessentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelSOARessentials","azuresentinel","azure-sentinel-solution-sentinelsoaressentials","2022-06-27","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"","SeraphicSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SeraphicSecurity","seraphicalgorithmsltd1616061090462","seraphic-security-sentinel","2023-07-31","2023-07-31","","Seraphic Security","Partner","https://seraphicsecurity.com","","domains","","","","","","","false","","false" -"SeraphicWebSecurity_CL","SeraphicSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SeraphicSecurity","seraphicalgorithmsltd1616061090462","seraphic-security-sentinel","2023-07-31","2023-07-31","","Seraphic 
Security","Partner","https://seraphicsecurity.com","","domains","SeraphicWebSecurity","Seraphic","Seraphic Web Security","The Seraphic Web Security data connector provides the capability to ingest [Seraphic Web Security](https://seraphicsecurity.com/) events and alerts into Microsoft Sentinel.","[{""title"": ""Connect Seraphic Web Security"", ""description"": ""Please insert the integration name, the Seraphic integration URL and your workspace name for Microsoft Sentinel:"", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Integration Name"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{subscriptionId}}""}, {""displayText"": ""Integration URL"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{endpoint}}""}, {""displayText"": ""Workspace Name - Log Analytics"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{workspaceName}}""}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true, ""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Seraphic API key"", ""description"": ""API key for Microsoft Sentinel connected to your Seraphic Web Security tenant. 
To get this API key for your tenant - [read this documentation](https://constellation.seraphicsecurity.com/integrations/microsoft_sentinel/Guidance/MicrosoftSentinel-IntegrationGuide-230822.pdf).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SeraphicSecurity/Data%20Connectors/SeraphicSecurityConnector.json","true" -"","ServiceNow TISC","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ServiceNow%20TISC","servicenow1594831756316","sentinel-solution-tisc","2025-01-15","2025-01-15","","ServiceNow","Partner","https://support.servicenow.com/now","","domains","","","","","","","false","","false" -"","Servicenow","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Servicenow","azuresentinel","azure-sentinel-solution-servicenow","2022-09-19","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"","SevcoSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SevcoSecurity","azuresentinel","azure-sentinel-solution-sevcosecurity","2023-05-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"Sevco_Devices_CL","SevcoSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SevcoSecurity","azuresentinel","azure-sentinel-solution-sevcosecurity","2023-05-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SevcoDevices","Sevco Security","Sevco Platform - Devices","The Sevco Platform - Devices connector allows you to easily connect your Sevco Device Assets with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization’s assets and improves your security operation capabilities.

[For more information >​](https://docs.sev.co/docs/microsoft-sentinel-inventory)","[{""title"": ""Configure and connect to Sevco"", ""description"": ""The Sevco Platform can integrate with and export assets directly to Microsoft Sentinel..\u200b\n\n1. Go to [Sevco - Microsoft Sentinel Integration](https://docs.sev.co/docs/microsoft-sentinel-inventory), and follow the instructions, using the parameters below to set up the connection:.\n\n"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SevcoSecurity/Data%20Connectors/Connector_SevcoSecurity.json","true" -"","ShadowByte Aria","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ShadowByte%20Aria","shadowbyte1641237427416","ariasent1","2021-12-24","","","Shadowbyte","Partner","https://shadowbyte.com/products/aria/","","domains","","","","","","","false","","false" -"","Shodan","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Shodan","azuresentinel","azure-sentinel-solution-shodan","2023-02-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","Silverfort","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Silverfort","silverfort","microsoft-sentinel-solution-silverfort","2024-09-01","","","Silverfort","Partner","https://www.silverfort.com/customer-success/#support","","domains","","","","","","","false","","false" -"CommonSecurityLog","Silverfort","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Silverfort","silverfort","microsoft-sentinel-solution-silverfort","2024-09-01","","","Silverfort","Partner","https://www.silverfort.com/customer-success/#support","","domains","SilverfortAma","Silverfort","Silverfort Admin Console","The [Silverfort](https://silverfort.com) ITDR Admin Console connector solution allows ingestion of Silverfort events and logging into Microsoft Sentinel.
Silverfort provides syslog based events and logging using Common Event Format (CEF). By forwarding your Silverfort ITDR Admin Console CEF data into Microsoft Sentinel, you can take advantage of Sentinels's search & correlation, alerting, and threat intelligence enrichment on Silverfort data.
Please contact Silverfort or consult the Silverfort documentation for more information.","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Silverfort/Data%20Connectors/SilverfortAma.json","true" -"","SlackAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlackAudit","azuresentinel","azure-sentinel-solution-slackaudit","2021-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"SlackAuditNativePoller_CL","SlackAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlackAudit","azuresentinel","azure-sentinel-solution-slackaudit","2021-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SlackAudit","Slack","Slack","The [Slack](https://slack.com) data connector provides the capability to ingest [Slack Audit Records](https://api.slack.com/admins/audit-logs) events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://api.slack.com/admins/audit-logs#the_audit_event) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more. 
This data connector uses Microsoft Sentinel native polling capability.","[{""title"": ""Connect Slack to Microsoft Sentinel"", ""description"": ""Enable Slack audit Logs."", ""instructions"": [{""parameters"": {""enable"": ""true""}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Slack API credentials"", ""description"": ""**SlackAPIBearerToken** is required for REST API. [See the documentation to learn more about API](https://api.slack.com/web#authentication). 
Check all [requirements and follow the instructions](https://api.slack.com/web#authentication) for obtaining credentials.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlackAudit/Data%20Connectors/SlackNativePollerConnector/azuredeploy_Slack_native_poller_connector.json","true" -"SlackAuditNativePoller_CL","SlackAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlackAudit","azuresentinel","azure-sentinel-solution-slackaudit","2021-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SlackAuditAPI","Slack","[DEPRECATED] Slack Audit","The [Slack](https://slack.com) Audit data connector provides the capability to ingest [Slack Audit Records](https://api.slack.com/admins/audit-logs) events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://api.slack.com/admins/audit-logs#the_audit_event) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Slack REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-SlackAuditAPI-parser) to create the Kusto functions alias, **SlackAudit**""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Slack API**\n\n [Follow the instructions](https://api.slack.com/web#authentication) to obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Slack Audit data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Slack Audit data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SlackAuditAPI-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **SlackAPIBearerToken** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Slack Audit data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentinel-SlackAuditAPI-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select ** New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tSlackAPIBearerToken\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**SlackAPIBearerToken** is required for REST API. [See the documentation to learn more about API](https://api.slack.com/web#authentication). 
Check all [requirements and follow the instructions](https://api.slack.com/web#authentication) for obtaining credentials.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlackAudit/Data%20Connectors/SlackAudit_API_FunctionApp.json","true" -"SlackAuditV2_CL","SlackAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlackAudit","azuresentinel","azure-sentinel-solution-slackaudit","2021-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SlackAuditAPI","Slack","[DEPRECATED] Slack Audit","The [Slack](https://slack.com) Audit data connector provides the capability to ingest [Slack Audit Records](https://api.slack.com/admins/audit-logs) events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://api.slack.com/admins/audit-logs#the_audit_event) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Slack REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-SlackAuditAPI-parser) to create the Kusto functions alias, **SlackAudit**""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Slack API**\n\n [Follow the instructions](https://api.slack.com/web#authentication) to obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Slack Audit data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Slack Audit data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SlackAuditAPI-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **SlackAPIBearerToken** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Slack Audit data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentinel-SlackAuditAPI-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select ** New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tSlackAPIBearerToken\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**SlackAPIBearerToken** is required for REST API. [See the documentation to learn more about API](https://api.slack.com/web#authentication). 
Check all [requirements and follow the instructions](https://api.slack.com/web#authentication) for obtaining credentials.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlackAudit/Data%20Connectors/SlackAudit_API_FunctionApp.json","true" -"SlackAudit_CL","SlackAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlackAudit","azuresentinel","azure-sentinel-solution-slackaudit","2021-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SlackAuditAPI","Slack","[DEPRECATED] Slack Audit","The [Slack](https://slack.com) Audit data connector provides the capability to ingest [Slack Audit Records](https://api.slack.com/admins/audit-logs) events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://api.slack.com/admins/audit-logs#the_audit_event) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Slack REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-SlackAuditAPI-parser) to create the Kusto functions alias, **SlackAudit**""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Slack API**\n\n [Follow the instructions](https://api.slack.com/web#authentication) to obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Slack Audit data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Slack Audit data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SlackAuditAPI-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **SlackAPIBearerToken** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Slack Audit data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentinel-SlackAuditAPI-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select ** New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tSlackAPIBearerToken\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**SlackAPIBearerToken** is required for REST API. [See the documentation to learn more about API](https://api.slack.com/web#authentication). 
Check all [requirements and follow the instructions](https://api.slack.com/web#authentication) for obtaining credentials.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlackAudit/Data%20Connectors/SlackAudit_API_FunctionApp.json","true" -"SlackAuditV2_CL","SlackAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlackAudit","azuresentinel","azure-sentinel-solution-slackaudit","2021-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SlackAuditLogsCCPDefinition","Microsoft","SlackAudit (via Codeless Connector Framework)","The SlackAudit data connector provides the capability to ingest [Slack Audit logs](https://api.slack.com/admins/audit-logs) into Microsoft Sentinel through the REST API. Refer to [API documentation](https://api.slack.com/admins/audit-logs-call) for more information.","[{""description"": ""To ingest data from SlackAudit to Microsoft Sentinel, you have to click on Add Domain button below then you get a pop up to fill the details, provide the required information and click on Connect. 
You can see the usernames, actions connected in the grid.\n>"", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""UserName"", ""columnValue"": ""properties.addOnAttributes.UserName""}, {""columnName"": ""Actions"", ""columnValue"": ""properties.addOnAttributes.Actions""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add domain"", ""title"": ""Add domain"", ""subtitle"": ""Add domain"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""UserName"", ""placeholder"": ""Enter your User Name"", ""name"": ""UserName"", ""type"": ""text"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""SlackAudit API Key"", ""placeholder"": ""Enter your API KEY"", ""name"": ""apiKey"", ""type"": ""password"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""SlackAudit Action Type"", ""placeholder"": ""Enter the Action Type"", ""name"": ""action"", ""type"": ""string"", ""required"": true}}]}]}}], ""title"": ""Connect SlackAudit to Microsoft Sentinel\n\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}], ""customs"": [{""name"": ""UserName, SlackAudit API Key & Action Type"", ""description"": ""To Generate the Access Token, create a new application in Slack, then add necessary scopes and configure the redirect URL. 
For detailed instructions on generating the access token, user name and action name limit, refer the [link](https://github.com/v-gsrihitha/v-gsrihitha/blob/main/SlackAudit/Readme.md).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlackAudit/Data%20Connectors/SlackAuditLog_CCP/SlackAuditLog_ConnectorDefinition.json","true" -"","SlashNext","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlashNext","slashnext1598548183597","slashnext-weblog-assessment-for-microsoft-sentinel","2022-08-12","2022-08-12","","SlashNext","Partner","https://support@slashnext.com","","domains","","","","","","","false","","false" -"AzureDiagnostics","SlashNext","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlashNext","slashnext1598548183597","slashnext-weblog-assessment-for-microsoft-sentinel","2022-08-12","2022-08-12","","SlashNext","Partner","https://support@slashnext.com","","domains","SlashNextFunctionApp","SlashNext","SlashNext Function App","The SlashNext function app utilizes python to perform the analysis of the raw logs and returns URLs present in the logs.","[{""title"": ""Custom Deployment Using Azure Resource Manager (ARM) Template"", ""description"": ""Follow these steps to perform custom deployment of the SlashNext function app using ARM template:\n\n1. Click this [link](https://portal.azure.com/#create/Microsoft.Template) to open the Microsoft Azure Portal for custom deployment.\n2. Under the **Select a template** tab in the **Custom deployment** section, click **Build your own template in the editor**.\n3. Copy the contents of the **azuredeploy.json ARM template file** from this [GitHub repository](https://github.com/MuhammadAli-snx/Azure-Sentinel/blob/master/Solutions/SlashNext/FunctionApp/azuredeploy.json) and paste them into the **Edit template** section.\n4. Click the **Save** button.\n5. Select the preferred **Subscription**, **Resource Group**, and **Location**.\n6. 
Click **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.Storage/storageAccounts"", ""permissionsDisplayText"": ""read, write, and delete permissions on the storage account are required."", ""providerDisplayName"": ""Storage Account"", ""scope"": ""Storage Account"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Microsoft.Web/serverfarms permissions"", ""description"": ""Read and write permissions to Azure App Service Plan are required to create and manage the App Service Plan. [See the documentation to learn more about App Service Plans](https://learn.microsoft.com/azure/app-service/overview-hosting-plans).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlashNext/Data%20Connectors/SlashNext_FunctionApp.json","true" -"","SlashNext SIEM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlashNext%20SIEM","slashnext1598548183597","slashnext-security-events-for-microsoft-sentinel","2023-05-26","2023-06-16","","SlashNext","Partner","https://slashnext.com/support","","domains","","","","","","","false","","false" -"","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"Snowflake_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeDataConnector","Snowflake","[DEPRECATED] Snowflake","The Snowflake data connector provides the capability to ingest Snowflake [login logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history.html) and [query logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history.html) into Microsoft Sentinel using the Snowflake Python Connector. Refer to [Snowflake documentation](https://docs.snowflake.com/en/user-guide/python-connector.html) for more information.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Azure Blob Storage API to pull logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Snowflake**](https://aka.ms/sentinel-SnowflakeDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Creating user in Snowflake**\n\nTo query data from Snowflake you need a user that is assigned to a role with sufficient privileges and a virtual warehouse cluster. The initial size of this cluster will be set to small but if it is insufficient, the cluster size can be increased as necessary.\n\n1. Enter the Snowflake console.\n2. Switch role to SECURITYADMIN and [create a new role](https://docs.snowflake.com/en/sql-reference/sql/create-role.html):\n```\nUSE ROLE SECURITYADMIN;\nCREATE OR REPLACE ROLE EXAMPLE_ROLE_NAME;```\n3. 
Switch role to SYSADMIN and [create warehouse](https://docs.snowflake.com/en/sql-reference/sql/create-warehouse.html) and [grand access](https://docs.snowflake.com/en/sql-reference/sql/grant-privilege.html) to it:\n```\nUSE ROLE SYSADMIN;\nCREATE OR REPLACE WAREHOUSE EXAMPLE_WAREHOUSE_NAME\n WAREHOUSE_SIZE = 'SMALL' \n AUTO_SUSPEND = 5\n AUTO_RESUME = true\n INITIALLY_SUSPENDED = true;\nGRANT USAGE, OPERATE ON WAREHOUSE EXAMPLE_WAREHOUSE_NAME TO ROLE EXAMPLE_ROLE_NAME;```\n4. Switch role to SECURITYADMIN and [create a new user](https://docs.snowflake.com/en/sql-reference/sql/create-user.html):\n```\nUSE ROLE SECURITYADMIN;\nCREATE OR REPLACE USER EXAMPLE_USER_NAME\n PASSWORD = 'example_password'\n DEFAULT_ROLE = EXAMPLE_ROLE_NAME\n DEFAULT_WAREHOUSE = EXAMPLE_WAREHOUSE_NAME\n;```\n5. Switch role to ACCOUNTADMIN and [grant access to snowflake database](https://docs.snowflake.com/en/sql-reference/account-usage.html#enabling-account-usage-for-other-roles) for role.\n```\nUSE ROLE ACCOUNTADMIN;\nGRANT IMPORTED PRIVILEGES ON DATABASE SNOWFLAKE TO ROLE EXAMPLE_ROLE_NAME;```\n6. 
Switch role to SECURITYADMIN and [assign role](https://docs.snowflake.com/en/sql-reference/sql/grant-role.html) to user:\n```\nUSE ROLE SECURITYADMIN;\nGRANT ROLE EXAMPLE_ROLE_NAME TO USER EXAMPLE_USER_NAME;```\n\n>**IMPORTANT:** Save user and API password created during this step as they will be used during deployment step.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Snowflake credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SnowflakeDataConnector-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Snowflake Account Identifier**, **Snowflake User**, **Snowflake Password**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key**\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentinel-SnowflakeDataConnector-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration. \n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tSNOWFLAKE_ACCOUNT\n\t\tSNOWFLAKE_USER\n\t\tSNOWFLAKE_PASSWORD\n\t\tWORKSPACE_ID\n\t\tSHARED_KEY\n\t\tlogAnalyticsUri (optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. \n4. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Snowflake Credentials"", ""description"": ""**Snowflake Account Identifier**, **Snowflake User** and **Snowflake Password** are required for connection. See the documentation to learn more about [Snowflake Account Identifier](https://docs.snowflake.com/en/user-guide/admin-account-identifier.html#). 
Instructions on how to create user for this connector you can find below.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/Snowflake_API_FunctionApp.json","true" -"SnowflakeLoad_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. 
Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","[{""title"": ""Connect Snowflake to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** To ensure data is presented in separate columns for each field, execute the parser using the **Snowflake()** function""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Snowflake, you need to provide the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Account Identifier \n To gather data from Snowflake, you'll need Snowflake Account Identifier.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Programmatic Access Token \n To gather data from Snowflake, you'll need the Snowflake Programmatic Access Token""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""For detailed instructions on retrieving the Account Identifier and Programmatic Access Token, please refer to the [Connector Tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/Readme.md).""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Account-Identifier"", ""columnValue"": ""properties.addOnAttributes.AccountId""}, {""columnName"": ""Table Name"", ""columnValue"": ""properties.dataType""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake Account Identifier"", ""placeholder"": ""Enter Snowflake Account Identifier"", ""type"": ""text"", ""name"": ""accountId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": 
""Snowflake PAT"", ""placeholder"": ""Enter Snowflake PAT"", ""type"": ""password"", ""name"": ""apikey"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" -"SnowflakeLogin_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users 
Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","[{""title"": ""Connect Snowflake to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** To ensure data is presented in separate columns for each field, execute the parser using the **Snowflake()** function""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Snowflake, you need to provide the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Account Identifier \n To gather data from Snowflake, you'll need Snowflake Account Identifier.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Programmatic Access Token \n To gather data from Snowflake, you'll need the Snowflake Programmatic Access Token""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""For detailed instructions on retrieving the Account Identifier and Programmatic Access Token, please refer to the [Connector Tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/Readme.md).""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Account-Identifier"", ""columnValue"": ""properties.addOnAttributes.AccountId""}, {""columnName"": ""Table Name"", ""columnValue"": ""properties.dataType""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake Account Identifier"", ""placeholder"": ""Enter Snowflake Account Identifier"", ""type"": 
""text"", ""name"": ""accountId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake PAT"", ""placeholder"": ""Enter Snowflake PAT"", ""type"": ""password"", ""name"": ""apikey"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" -"SnowflakeMaterializedView_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics 
Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","[{""title"": ""Connect Snowflake to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** To ensure data is presented in separate columns for each field, execute the parser using the **Snowflake()** function""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Snowflake, you need to provide the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Account Identifier \n To gather data from Snowflake, you'll need Snowflake Account Identifier.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Programmatic Access Token \n To gather data from Snowflake, you'll need the Snowflake Programmatic Access Token""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""For detailed instructions on retrieving the Account Identifier and Programmatic Access Token, please refer to the [Connector Tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/Readme.md).""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Account-Identifier"", ""columnValue"": ""properties.addOnAttributes.AccountId""}, {""columnName"": ""Table Name"", ""columnValue"": ""properties.dataType""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake 
Account Identifier"", ""placeholder"": ""Enter Snowflake Account Identifier"", ""type"": ""text"", ""name"": ""accountId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake PAT"", ""placeholder"": ""Enter Snowflake PAT"", ""type"": ""password"", ""name"": ""apikey"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" -"SnowflakeQuery_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables 
Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","[{""title"": ""Connect Snowflake to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** To ensure data is presented in separate columns for each field, execute the parser using the **Snowflake()** function""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Snowflake, you need to provide the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Account Identifier \n To gather data from Snowflake, you'll need Snowflake Account Identifier.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Programmatic Access Token \n To gather data from Snowflake, you'll need the Snowflake Programmatic Access Token""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""For detailed instructions on retrieving the Account Identifier and Programmatic Access Token, please refer to the [Connector Tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/Readme.md).""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Account-Identifier"", ""columnValue"": ""properties.addOnAttributes.AccountId""}, {""columnName"": ""Table Name"", ""columnValue"": ""properties.dataType""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake Account Identifier"", ""placeholder"": ""Enter Snowflake Account Identifier"", ""type"": ""text"", ""name"": ""accountId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake PAT"", ""placeholder"": ""Enter Snowflake PAT"", ""type"": ""password"", ""name"": ""apikey"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" 
-"SnowflakeRoleGrant_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. 
Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","[{""title"": ""Connect Snowflake to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** To ensure data is presented in separate columns for each field, execute the parser using the **Snowflake()** function""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Snowflake, you need to provide the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Account Identifier \n To gather data from Snowflake, you'll need Snowflake Account Identifier.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Programmatic Access Token \n To gather data from Snowflake, you'll need the Snowflake Programmatic Access Token""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""For detailed instructions on retrieving the Account Identifier and Programmatic Access Token, please refer to the [Connector Tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/Readme.md).""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Account-Identifier"", ""columnValue"": ""properties.addOnAttributes.AccountId""}, {""columnName"": ""Table Name"", ""columnValue"": ""properties.dataType""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake Account Identifier"", ""placeholder"": ""Enter Snowflake Account Identifier"", ""type"": ""text"", ""name"": ""accountId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": 
""Snowflake PAT"", ""placeholder"": ""Enter Snowflake PAT"", ""type"": ""password"", ""name"": ""apikey"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" -"SnowflakeRoles_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users 
Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","[{""title"": ""Connect Snowflake to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** To ensure data is presented in separate columns for each field, execute the parser using the **Snowflake()** function""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Snowflake, you need to provide the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Account Identifier \n To gather data from Snowflake, you'll need Snowflake Account Identifier.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Programmatic Access Token \n To gather data from Snowflake, you'll need the Snowflake Programmatic Access Token""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""For detailed instructions on retrieving the Account Identifier and Programmatic Access Token, please refer to the [Connector Tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/Readme.md).""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Account-Identifier"", ""columnValue"": ""properties.addOnAttributes.AccountId""}, {""columnName"": ""Table Name"", ""columnValue"": ""properties.dataType""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake Account Identifier"", ""placeholder"": ""Enter Snowflake Account Identifier"", ""type"": 
""text"", ""name"": ""accountId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake PAT"", ""placeholder"": ""Enter Snowflake PAT"", ""type"": ""password"", ""name"": ""apikey"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" -"SnowflakeTableStorageMetrics_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics 
Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","[{""title"": ""Connect Snowflake to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** To ensure data is presented in separate columns for each field, execute the parser using the **Snowflake()** function""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Snowflake, you need to provide the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Account Identifier \n To gather data from Snowflake, you'll need Snowflake Account Identifier.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Programmatic Access Token \n To gather data from Snowflake, you'll need the Snowflake Programmatic Access Token""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""For detailed instructions on retrieving the Account Identifier and Programmatic Access Token, please refer to the [Connector Tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/Readme.md).""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Account-Identifier"", ""columnValue"": ""properties.addOnAttributes.AccountId""}, {""columnName"": ""Table Name"", ""columnValue"": ""properties.dataType""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake 
Account Identifier"", ""placeholder"": ""Enter Snowflake Account Identifier"", ""type"": ""text"", ""name"": ""accountId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake PAT"", ""placeholder"": ""Enter Snowflake PAT"", ""type"": ""password"", ""name"": ""apikey"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" -"SnowflakeTables_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables 
Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","[{""title"": ""Connect Snowflake to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** To ensure data is presented in separate columns for each field, execute the parser using the **Snowflake()** function""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Snowflake, you need to provide the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Account Identifier \n To gather data from Snowflake, you'll need Snowflake Account Identifier.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Programmatic Access Token \n To gather data from Snowflake, you'll need the Snowflake Programmatic Access Token""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""For detailed instructions on retrieving the Account Identifier and Programmatic Access Token, please refer to the [Connector Tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/Readme.md).""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Account-Identifier"", ""columnValue"": ""properties.addOnAttributes.AccountId""}, {""columnName"": ""Table Name"", ""columnValue"": ""properties.dataType""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake Account Identifier"", ""placeholder"": ""Enter Snowflake Account Identifier"", ""type"": ""text"", ""name"": ""accountId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake PAT"", ""placeholder"": ""Enter Snowflake PAT"", ""type"": ""password"", ""name"": ""apikey"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" 
-"SnowflakeUserGrant_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. 
Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","[{""title"": ""Connect Snowflake to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** To ensure data is presented in separate columns for each field, execute the parser using the **Snowflake()** function""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Snowflake, you need to provide the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Account Identifier \n To gather data from Snowflake, you'll need Snowflake Account Identifier.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Programmatic Access Token \n To gather data from Snowflake, you'll need the Snowflake Programmatic Access Token""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""For detailed instructions on retrieving the Account Identifier and Programmatic Access Token, please refer to the [Connector Tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/Readme.md).""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Account-Identifier"", ""columnValue"": ""properties.addOnAttributes.AccountId""}, {""columnName"": ""Table Name"", ""columnValue"": ""properties.dataType""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake Account Identifier"", ""placeholder"": ""Enter Snowflake Account Identifier"", ""type"": ""text"", ""name"": ""accountId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": 
""Snowflake PAT"", ""placeholder"": ""Enter Snowflake PAT"", ""type"": ""password"", ""name"": ""apikey"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" -"SnowflakeUsers_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users 
Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","[{""title"": ""Connect Snowflake to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** To ensure data is presented in separate columns for each field, execute the parser using the **Snowflake()** function""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Snowflake, you need to provide the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Account Identifier \n To gather data from Snowflake, you'll need Snowflake Account Identifier.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Programmatic Access Token \n To gather data from Snowflake, you'll need the Snowflake Programmatic Access Token""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""For detailed instructions on retrieving the Account Identifier and Programmatic Access Token, please refer to the [Connector Tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/Readme.md).""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Account-Identifier"", ""columnValue"": ""properties.addOnAttributes.AccountId""}, {""columnName"": ""Table Name"", ""columnValue"": ""properties.dataType""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake Account Identifier"", ""placeholder"": ""Enter Snowflake Account Identifier"", ""type"": 
""text"", ""name"": ""accountId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake PAT"", ""placeholder"": ""Enter Snowflake PAT"", ""type"": ""password"", ""name"": ""apikey"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" -"","SonicWall Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SonicWall%20Firewall","sonicwall-inc","sonicwall-networksecurity-azure-sentinal","2022-05-06","","","SonicWall","Partner","https://www.sonicwall.com/support/","","domains","","","","","","","false","","false" -"CommonSecurityLog","SonicWall Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SonicWall%20Firewall","sonicwall-inc","sonicwall-networksecurity-azure-sentinal","2022-05-06","","","SonicWall","Partner","https://www.sonicwall.com/support/","","domains","SonicWallFirewall","SonicWall","[Deprecated] SonicWall Firewall via Legacy Agent","Common Event Format (CEF) is an industry standard format on top of Syslog messages, used by SonicWall to allow event interoperability among different platforms. By connecting your CEF logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward SonicWall Firewall Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your SonicWall Firewall to send Syslog messages in CEF format to the proxy machine. Make sure you send the logs to port 514 TCP on the machine's IP address.\n\n Follow Instructions . Then Make sure you select local use 4 as the facility. Then select ArcSight as the Syslog format.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SonicWall%20Firewall/Data%20Connectors/SonicwallFirewall.json","true" -"CommonSecurityLog","SonicWall Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SonicWall%20Firewall","sonicwall-inc","sonicwall-networksecurity-azure-sentinal","2022-05-06","","","SonicWall","Partner","https://www.sonicwall.com/support/","","domains","SonicWallFirewallAma","SonicWall","[Deprecated] SonicWall Firewall via AMA","Common Event Format (CEF) is an industry standard format on top of Syslog messages, used by SonicWall to allow event interoperability among different platforms. By connecting your CEF logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. 
Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine""}, {""title"": ""Step B. Forward SonicWall Firewall Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your SonicWall Firewall to send Syslog messages in CEF format to the proxy machine. Make sure you send the logs to port 514 TCP on the machine's IP address.\n\n Follow Instructions . Then Make sure you select local use 4 as the facility. Then select ArcSight as the Syslog format.""}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SonicWall%20Firewall/Data%20Connectors/template_SonicwallFirewallAMA.json","true" -"","SonraiSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SonraiSecurity","sonraisecurityllc1584373214489","sonrai_sentinel_offer","2021-10-18","","","Sonrai","Partner","","","domains","","","","","","","false","","false" -"Sonrai_Tickets_CL","SonraiSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SonraiSecurity","sonraisecurityllc1584373214489","sonrai_sentinel_offer","2021-10-18","","","Sonrai","Partner","","","domains","SonraiDataConnector","Sonrai","Sonrai Data Connector","Use this data connector to integrate with Sonrai Security and get Sonrai tickets sent directly to Microsoft Sentinel.","[{""title"": ""Sonrai Security Data Connector"", ""description"": ""1. Navigate to Sonrai Security dashboard.\n2. On the bottom left panel, click on integrations.\n3. Select Microsoft Sentinel from the list of available Integrations.\n4. 
Fill in the form using the information provided below."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SonraiSecurity/Data%20Connectors/Connector_REST_API_Sonrai.json","true" -"","Sophos Cloud Optix","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20Cloud%20Optix","sophos","sophos_cloud_optix_mss","2022-05-02","","","Sophos","Partner","https://www.sophos.com/en-us/support","","domains","","","","","","","false","","false" -"SophosCloudOptix_CL","Sophos Cloud Optix","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20Cloud%20Optix","sophos","sophos_cloud_optix_mss","2022-05-02","","","Sophos","Partner","https://www.sophos.com/en-us/support","","domains","SophosCloudOptix","Sophos","Sophos Cloud Optix","The [Sophos Cloud Optix](https://www.sophos.com/products/cloud-optix.aspx) connector allows you to easily connect your Sophos Cloud Optix logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. 
This gives you more insight into your organization's cloud security and compliance posture and improves your cloud security operation capabilities.","[{""title"": ""1. Get the Workspace ID and the Primary Key"", ""description"": ""Copy the Workspace ID and Primary Key for your workspace.\n"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""2. Configure the Sophos Cloud Optix Integration"", ""description"": ""In Sophos Cloud Optix go to [Settings->Integrations->Microsoft Sentinel](https://optix.sophos.com/#/integrations/sentinel) and enter the Workspace ID and Primary Key copied in Step 1.\n""}, {""title"": ""3. Select Alert Levels"", ""description"": ""In Alert Levels, select which Sophos Cloud Optix alerts you want to send to Microsoft Sentinel.\n""}, {""title"": ""4. Turn on the integration"", ""description"": ""To turn on the integration, select Enable, and then click Save.\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20Cloud%20Optix/Data%20Connectors/Connector_REST_API_SophosCloudOptix.json","true" -"","Sophos Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-sophosep","2021-07-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"SophosEP_CL","Sophos Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-sophosep","2021-07-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","SophosEP","Sophos","Sophos Endpoint Protection","The [Sophos Endpoint Protection](https://www.sophos.com/en-us/products/endpoint-antivirus.aspx) data connector provides the capability to ingest [Sophos events](https://docs.sophos.com/central/Customer/help/en-us/central/Customer/common/concepts/Events.html) into Microsoft Sentinel. Refer to [Sophos Central Admin documentation](https://docs.sophos.com/central/Customer/help/en-us/central/Customer/concepts/Logs.html) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Sophos Central APIs to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**SophosEPEvent**](https://aka.ms/sentinel-SophosEP-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Sophos Central API**\n\n Follow the instructions to obtain the credentials.\n\n1. In Sophos Central Admin, go to **Global Settings > API Token Management**.\n2. To create a new token, click **Add token** from the top-right corner of the screen.\n3. Select a **token name** and click **Save**. The **API Token Summary** for this token is displayed.\n4. 
Click **Copy** to copy your **API Access URL + Headers** from the **API Token Summary** section into your clipboard.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Sophos Endpoint Protection data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Sophos Endpoint Protection data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SophosEP-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **Sophos API Access URL and Headers**, **AzureSentinelWorkspaceId**, **AzureSentinelSharedKey**. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Sophos Endpoint Protection data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-SophosEP-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tSOPHOS_TOKEN\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**API token** is required. 
[See the documentation to learn more about API token](https://docs.sophos.com/central/Customer/help/en-us/central/Customer/concepts/ep_ApiTokenManagement.html)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20Endpoint%20Protection/Data%20Connectors/SophosEP_API_FunctionApp.json","true" -"SophosEPAlerts_CL","Sophos Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-sophosep","2021-07-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","SophosEndpointProtectionCCPDefinition","Microsoft","Sophos Endpoint Protection (using REST API)","The [Sophos Endpoint Protection](https://www.sophos.com/en-us/products/endpoint-antivirus.aspx) data connector provides the capability to ingest [Sophos events](https://developer.sophos.com/docs/siem-v1/1/routes/events/get) and [Sophos alerts](https://developer.sophos.com/docs/siem-v1/1/routes/alerts/get) into Microsoft Sentinel. Refer to [Sophos Central Admin documentation](https://docs.sophos.com/central/Customer/help/en-us/central/Customer/concepts/Logs.html) for more information.","[{""description"": ""Follow [Sophos instructions](https://developer.sophos.com/getting-started-tenant) to create a service principal with access to the Sophos API. 
It will need the Service Principal ReadOnly role.\n Through those instructions, you should get the Client ID, Client Secret, Tenant ID and data region.\n Fill the form below with that information."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Sophos Tenant ID"", ""placeholder"": ""Sophos Tenant ID"", ""type"": ""text"", ""name"": ""sophosTenantId""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Sophos Tenant Data Region"", ""placeholder"": ""eu01, eu02, us01, us02 or us03"", ""type"": ""text"", ""name"": ""sophosRegion""}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""name"", ""columnName"": ""Name""}, {""columnValue"": ""id"", ""columnName"": ""ID""}]}}], ""title"": ""Connect to Sophos Endpoint Protection API to start collecting event and alert logs in Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Sophos Endpoint Protection API access"", ""description"": ""Access to the Sophos Endpoint Protection API through a service principal is required.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20Endpoint%20Protection/Data%20Connectors/SophosEP_ccp/SophosEP_DataConnectorDefinition.json","true" -"SophosEPEvents_CL","Sophos Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-sophosep","2021-07-07","","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com/","","domains","SophosEndpointProtectionCCPDefinition","Microsoft","Sophos Endpoint Protection (using REST API)","The [Sophos Endpoint Protection](https://www.sophos.com/en-us/products/endpoint-antivirus.aspx) data connector provides the capability to ingest [Sophos events](https://developer.sophos.com/docs/siem-v1/1/routes/events/get) and [Sophos alerts](https://developer.sophos.com/docs/siem-v1/1/routes/alerts/get) into Microsoft Sentinel. Refer to [Sophos Central Admin documentation](https://docs.sophos.com/central/Customer/help/en-us/central/Customer/concepts/Logs.html) for more information.","[{""description"": ""Follow [Sophos instructions](https://developer.sophos.com/getting-started-tenant) to create a service principal with access to the Sophos API. It will need the Service Principal ReadOnly role.\n Through those instructions, you should get the Client ID, Client Secret, Tenant ID and data region.\n Fill the form below with that information."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Sophos Tenant ID"", ""placeholder"": ""Sophos Tenant ID"", ""type"": ""text"", ""name"": ""sophosTenantId""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Sophos Tenant Data Region"", ""placeholder"": ""eu01, eu02, us01, us02 or us03"", ""type"": ""text"", ""name"": ""sophosRegion""}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""name"", ""columnName"": ""Name""}, {""columnValue"": ""id"", ""columnName"": ""ID""}]}}], ""title"": ""Connect to Sophos Endpoint Protection API to start collecting event and alert logs in Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", 
""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Sophos Endpoint Protection API access"", ""description"": ""Access to the Sophos Endpoint Protection API through a service principal is required.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20Endpoint%20Protection/Data%20Connectors/SophosEP_ccp/SophosEP_DataConnectorDefinition.json","true" -"","Sophos XG Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20XG%20Firewall","azuresentinel","azure-sentinel-solution-sophosxgfirewall","2021-10-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"Syslog","Sophos XG Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20XG%20Firewall","azuresentinel","azure-sentinel-solution-sophosxgfirewall","2021-10-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","SophosXGFirewall","Sophos","[Deprecated] Sophos XG Firewall","The [Sophos XG Firewall](https://www.sophos.com/products/next-gen-firewall.aspx) allows you to easily connect your Sophos XG Firewall logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigations. Integrating Sophos XG Firewall with Microsoft Sentinel provides more visibility into your organization's firewall traffic and will enhance security monitoring capabilities.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Sophos XG Firewall and load the function code or click [here](https://aka.ms/sentinel-SophosXG-parser), on the second line of the query, enter the hostname(s) of your Sophos XG Firewall device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n 2. Select **Apply below configuration to my machines** and select the facilities and severities.\n 3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. 
Configure and connect the Sophos XG Firewall"", ""description"": ""[Follow these instructions](https://doc.sophos.com/nsg/sophos-firewall/20.0/Help/en-us/webhelp/onlinehelp/AdministratorHelp/SystemServices/LogSettings/SyslogServerAdd/index.html) to enable syslog streaming. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Sophos XG Firewall"", ""description"": ""must be configured to export logs via Syslog""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20XG%20Firewall/Data%20Connectors/Connector_Syslog_SophosXGFirewall.json","true" -"","SpyCloud Enterprise Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SpyCloud%20Enterprise%20Protection","spycloudinc1680448518850","azure-sentinel-solution-spycloudenterprise","2023-09-09","","","Spycloud","Partner","https://portal.spycloud.com","","domains","","","","","","","false","","false" -"","Squadra Technologies SecRmm","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Squadra%20Technologies%20SecRmm","squadratechnologies","squadratechnologies.secrmmsentinel","2022-05-09","","","Squadra Technologies","Partner","https://www.squadratechnologies.com/Contact.aspx","","domains","","","","","","","false","","false" -"secRMM_CL","Squadra Technologies SecRmm","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Squadra%20Technologies%20SecRmm","squadratechnologies","squadratechnologies.secrmmsentinel","2022-05-09","","","Squadra Technologies","Partner","https://www.squadratechnologies.com/Contact.aspx","","domains","SquadraTechnologiesSecRMM","Squadra Technologies","Squadra 
Technologies secRMM","Use the Squadra Technologies secRMM Data Connector to push USB removable storage security event data into Microsoft Sentinel Log Analytics.","[{""title"": """", ""description"": ""Follow the step-by-step instructions provided in the [Squadra Technologies configuration guide for Microsoft Sentinel](https://www.squadratechnologies.com/StaticContent/ProductDownload/secRMM/9.11.0.0/secRMMAzureSentinelAdministratorGuide.pdf)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Squadra%20Technologies%20SecRmm/Data%20Connectors/SquadraTechnologiesSecRMM.json","true" -"","SquidProxy","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SquidProxy","azuresentinel","azure-sentinel-solution-squidproxy","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"SquidProxy_CL","SquidProxy","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SquidProxy","azuresentinel","azure-sentinel-solution-squidproxy","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","SquidProxy","Squid","[Deprecated] Squid Proxy","The [Squid Proxy](http://www.squid-cache.org/) connector allows you to easily connect your Squid Proxy logs with Microsoft Sentinel. This gives you more insight into your organization's network proxy traffic and improves your security operation capabilities.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Squid Proxy and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SquidProxy/Parsers/SquidProxy.txt), on the second line of the query, enter the hostname(s) of your SquidProxy device(s) and any other unique identifiers for the logstream. 
The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Squid Proxy server where the logs are generated.\n\n> Logs from Squid Proxy deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Configure the logs to be collected"", ""description"": ""Configure the custom log directory to be collected"", ""instructions"": [{""parameters"": {""linkType"": ""OpenCustomLogsSettings""}, ""type"": ""InstallAgent""}]}, {""title"": """", ""description"": ""1. Select the link above to open your workspace advanced settings \n2. From the left pane, select **Data**, select **Custom Logs** and click **Add+**\n3. Click **Browse** to upload a sample of a Squid Proxy log file(e.g. access.log or cache.log). Then, click **Next >**\n4. Select **New line** as the record delimiter and click **Next >**\n5. Select **Windows** or **Linux** and enter the path to Squid Proxy logs. Default paths are: \n - **Windows** directory: `C:\\Squid\\var\\log\\squid\\*.log`\n - **Linux** Directory: `/var/log/squid/*.log` \n6. After entering the path, click the '+' symbol to apply, then click **Next >** \n7. Add **SquidProxy_CL** as the custom log Name and click **Done**""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SquidProxy/Data%20Connectors/Connector_CustomLog_SquidProxy.json","true" -"","Styx Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Styx%20Intelligence","styx_intelligence","microsoft-sentinel-solution-styxintelligence","2025-02-07","","","Styx Intelligence","Partner","https://www.styxintel.com/contact-us/","","domains","","","","","","","false","","false" -"StyxViewAlerts_CL","Styx Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Styx%20Intelligence","styx_intelligence","microsoft-sentinel-solution-styxintelligence","2025-02-07","","","Styx Intelligence","Partner","https://www.styxintel.com/contact-us/","","domains","StyxViewEndpointConnectorDefinition","Styx Intelligence","StyxView Alerts (via Codeless Connector Platform)","The [StyxView Alerts](https://styxintel.com/) data connector enables seamless integration between the StyxView Alerts platform and Microsoft Sentinel. 
This connector ingests alert data from the StyxView Alerts API, allowing organizations to centralize and correlate actionable threat intelligence directly within their Microsoft Sentinel workspace.","[{""description"": ""Contact Styx Intelligence Support (support.team@styxintel.com) to get access to an API key."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""APIKey""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Connect to StyxView Alerts API to start collecting alert logs in Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""StyxView Alert API access"", ""description"": ""Access to the StyxView Alerts API through an API key is required.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Styx%20Intelligence/Data%20Connectors/Alerts/StyxView%20Alerts_ConnectorDefinition.json","true" -"","Symantec Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Symantec%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-symantecendpointprotection","2022-07-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"Syslog","Symantec Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Symantec%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-symantecendpointprotection","2022-07-01","","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com","","domains","SymantecEndpointProtection","Broadcom","[Deprecated] Symantec Endpoint Protection","The [Broadcom Symantec Endpoint Protection (SEP)](https://www.broadcom.com/products/cyber-security/endpoint/end-user/enterprise) connector allows you to easily connect your SEP logs with Microsoft Sentinel. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Symantec Endpoint Protection and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Symantec%20Endpoint%20Protection/Parsers/SymantecEndpointProtection.yaml), on the second line of the query, enter the hostname(s) of your SymantecEndpointProtection device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n 2. Select **Apply below configuration to my machines** and select the facilities and severities.\n 3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Configure and connect the Symantec Endpoint Protection"", ""description"": ""[Follow these instructions](https://techdocs.broadcom.com/us/en/symantec-security-software/endpoint-security-and-management/endpoint-protection/all/Monitoring-Reporting-and-Enforcing-Compliance/viewing-logs-v7522439-d37e464/exporting-data-to-a-syslog-server-v8442743-d15e1107.html) to configure the Symantec Endpoint Protection to forward syslog. 
Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Symantec Endpoint Protection (SEP)"", ""description"": ""must be configured to export logs via Syslog""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Symantec%20Endpoint%20Protection/Data%20Connectors/Connector_Syslog_SymantecEndpointProtection.json","true" -"","Symantec Integrated Cyber Defense","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Symantec%20Integrated%20Cyber%20Defense","azuresentinel","symantec_icdx_mss","2022-06-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"SymantecICDx_CL","Symantec Integrated Cyber Defense","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Symantec%20Integrated%20Cyber%20Defense","azuresentinel","symantec_icdx_mss","2022-06-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Symantec","Symantec","Symantec Integrated Cyber Defense Exchange","Symantec ICDx connector allows you to easily connect your Symantec security solutions logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization’s network and improves your security operation capabilities.","[{""title"": ""Configure and connect Symantec ICDx"", ""description"": ""1. On the ICDx navigation bar, click **Configuration**.\n2. At the top of the **Configuration** screen, click **Forwarders**, and next to Microsoft Sentinel (Log Analytics), click **Add**.\n3. 
In the Microsoft Sentinel (Log Analytics) window that opens, click **Show Advanced**. [See the documentation to set advanced features](https://aka.ms/SymantecICDX-learn-more).\n4. Make sure that you set a name for the forwarder and under Azure Destination, set these required fields:\n - Workspace ID: Paste the Workspace ID from the Microsoft Sentinel portal connector page.\n - Primary Key: Paste the Primary Key from the Microsoft Sentinel portal connector page.\n - Custom Log Name: Type the custom log name in the Microsoft Azure portal Log Analytics workspace to which you are going to forward events. The default is SymantecICDx.\n5. Click Save and to start the forwarder, go to Options > More and click **Start**."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Symantec%20Integrated%20Cyber%20Defense/Data%20Connectors/SymantecICDX.JSON","true" -"","Symantec VIP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Symantec%20VIP","azuresentinel","azure-sentinel-solution-symantecvip","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"Syslog","Symantec VIP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Symantec%20VIP","azuresentinel","azure-sentinel-solution-symantecvip","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SymantecVIP","Symantec","[Deprecated] Symantec VIP","The [Symantec VIP](https://vip.symantec.com/) connector allows you to easily connect your Symantec VIP logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Symantec VIP and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Symantec%20VIP/Parsers/SymantecVIP.yaml), on the second line of the query, enter the hostname(s) of your Symantec VIP device(s) and any other unique identifiers for the logstream. 
The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n 2. Select **Apply below configuration to my machines** and select the facilities and severities.\n 3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Configure and connect the Symantec VIP"", ""description"": ""[Follow these instructions](https://aka.ms/sentinel-symantecvip-configurationsteps) to configure the Symantec VIP Enterprise Gateway to forward syslog. 
Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Symantec VIP"", ""description"": ""must be configured to export logs via Syslog""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Symantec%20VIP/Data%20Connectors/Connector_Syslog_SymantecVIP.json","true" -"","SymantecProxySG","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SymantecProxySG","azuresentinel","azure-sentinel-symantec-proxysg","2021-05-25","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"Syslog","SymantecProxySG","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SymantecProxySG","azuresentinel","azure-sentinel-symantec-proxysg","2021-05-25","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","SymantecProxySG","Symantec","[Deprecated] Symantec ProxySG","The [Symantec ProxySG](https://www.broadcom.com/products/cyber-security/network/gateway/proxy-sg-and-advanced-secure-gateway) allows you to easily connect your Symantec ProxySG logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigations. Integrating Symantec ProxySG with Microsoft Sentinel provides more visibility into your organization's network proxy traffic and will enhance security monitoring capabilities.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Symantec Proxy SG and load the function code or click [here](https://aka.ms/sentinel-SymantecProxySG-parser), on the second line of the query, enter the hostname(s) of your Symantec Proxy SG device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n 2. Select **Apply below configuration to my machines** and select the facilities and severities.\n 3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. 
Configure and connect the Symantec ProxySG"", ""description"": "" \n 1. Log in to the Blue Coat Management Console.\n 2. Select Configuration > Access Logging > Formats.\n 3. Select New.\n 4. Enter a unique name in the Format Name field.\n 5. Click the radio button for **Custom format string** and paste the following string into the field.\n

1 $(date) $(time) $(time-taken) $(c-ip) $(cs-userdn) $(cs-auth-groups) $(x-exception-id) $(sc-filter-result) $(cs-categories) $(quot)$(cs(Referer))$(quot) $(sc-status) $(s-action) $(cs-method) $(quot)$(rs(Content-Type))$(quot) $(cs-uri-scheme) $(cs-host) $(cs-uri-port) $(cs-uri-path) $(cs-uri-query) $(cs-uri-extension) $(quot)$(cs(User-Agent))$(quot) $(s-ip) $(sr-bytes) $(rs-bytes) $(x-virus-id) $(x-bluecoat-application-name) $(x-bluecoat-application-operation) $(cs-uri-port) $(x-cs-client-ip-country) $(cs-threat-risk)

\n 6. Click the **OK** button. \n 7. Click the **Apply** button. \n 8. [Follow these instructions](https://knowledge.broadcom.com/external/article/166529/sending-access-logs-to-a-syslog-server.html) to enable syslog streaming of **Access** Logs. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Symantec ProxySG"", ""description"": ""must be configured to export logs via Syslog""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SymantecProxySG/Data%20Connectors/Connector_Syslog_SymantecProxySG.json","true" -"","Synack","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Synack","","","","","","","","","","","","","","","","","false","","false" -"","Syslog","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Syslog","azuresentinel","azure-sentinel-solution-syslog","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"Syslog","Syslog","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Syslog","azuresentinel","azure-sentinel-solution-syslog","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Syslog","Microsoft","Syslog via Legacy Agent","Syslog is an event logging protocol that is common to Linux. Applications will send messages that may be stored on the local machine or delivered to a Syslog collector. When the Agent for Linux is installed, it configures the local Syslog daemon to forward messages to the agent. The agent then sends the message to the workspace.

[Learn more >](https://aka.ms/sysLogInfo)","[{""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""You can collect Syslog events from your local machine by installing the agent on it. You can also collect Syslog generated on a different source by running the installation script below on the local machine, where the agent is installed.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n\n1. Select the link below to open your workspace **agents configuration**, and select the **Syslog** tab.\n2. Select **Add facility** and choose from the drop-down list of facilities. Repeat for all the facilities you want to add.\n3. Mark the check boxes for the desired severities for each facility.\n4. 
Click **Apply**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Syslog/Data%20Connectors/template_Syslog.json","true" -"Syslog","Syslog","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Syslog","azuresentinel","azure-sentinel-solution-syslog","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SyslogAma","Microsoft","Syslog via AMA","Syslog is an event logging protocol that is common to Linux. Applications will send messages that may be stored on the local machine or delivered to a Syslog collector. When the Agent for Linux is installed, it configures the local Syslog daemon to forward messages to the agent. The agent then sends the message to the workspace.

[Learn more >](https://aka.ms/sysLogInfo)","[{""title"": ""Enable data collection rule\u200b"", ""description"": ""You can collect Syslog events from your local machine by installing the agent on it. You can also collect Syslog generated on a different source by running the installation script below on the local machine, where the agent is installed.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""type"": ""SysLogAma""}]}, {""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 6}, ""type"": ""InstallAgent""}]}, {""title"": ""Run the following command to install and apply the Syslog collector:"", ""description"": ""> To collect logs generated on a different machine run this script on the machine where the agent is installed."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces/datasources"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace data sources"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Syslog/Data%20Connectors/template_SyslogAma.json","true" -"","Talon","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Talon","taloncybersecurityltd1654088115170","talonconnector","2023-01-25","","","Talon Security","Partner","https://docs.console.talon-sec.com/","","domains","","","","","","","false","","false" -"Talon_CL","Talon","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Talon","taloncybersecurityltd1654088115170","talonconnector","2023-01-25","","","Talon Security","Partner","https://docs.console.talon-sec.com/","","domains","TalonLogs","Talon Security","Talon Insights","The Talon Security Logs connector allows you to easily connect your Talon events and audit logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation.","[{""title"": """", ""description"": ""Please note the values below and follow the instructions here to connect your Talon Security events and audit logs with Microsoft Sentinel."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Talon/Data%20Connectors/TalonLogs.json","true" -"","Tanium","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tanium","taniuminc1646329360287","tanium_sentinel_connector","2022-05-16","2025-07-03","","Tanium Inc.","Partner","https://support.tanium.com","","domains","","","","","","","false","","false" -"","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","","","","","","","false","","false" -"Cymru_Scout_Account_Usage_Data_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. 
Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. 
Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indicators in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. \n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. 
**Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials (Username, Password) is required.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" -"Cymru_Scout_Domain_Data_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indicators in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. 
\n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials (Username, Password) is required.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" -"Cymru_Scout_IP_Data_Communications_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. 
Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indicators in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. \n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. 
**Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials (Username, Password) is required.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" -"Cymru_Scout_IP_Data_Details_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indicators in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. 
\n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" -"Cymru_Scout_IP_Data_Fingerprints_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. 
Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indicators in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. \n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. 
**Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" -"Cymru_Scout_IP_Data_Foundation_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indicators in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. 
\n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" -"Cymru_Scout_IP_Data_OpenPorts_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. 
Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indicators in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. \n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. 
**Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" -"Cymru_Scout_IP_Data_PDNS_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indicators in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. 
\n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" -"Cymru_Scout_IP_Data_Summary_Certs_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. 
Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indicators in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. \n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. 
**Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" -"Cymru_Scout_IP_Data_Summary_Details_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indicators in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. 
\n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" -"Cymru_Scout_IP_Data_Summary_Fingerprints_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. 
Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indicators in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. \n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. 
**Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" -"Cymru_Scout_IP_Data_Summary_OpenPorts_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indicators in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. 
\n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" -"Cymru_Scout_IP_Data_Summary_PDNS_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. 
Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indicators in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. \n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. 
**Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" -"Cymru_Scout_IP_Data_x509_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indictaors in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. 
\n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" -"","Teams","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Teams","sentinel4teams","sentinelforteams","2022-02-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"","Templates","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Templates","","","","","","","","","","","","","","","","","false","","false" -"","Tenable App","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App","tenable","tenable-sentinel-integration","2024-06-06","2025-06-19","","Tenable","Partner","https://www.tenable.com/support/technical-support","","domains","","","","","","","false","","false" -"Tenable_IE_CL","Tenable App","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App","tenable","tenable-sentinel-integration","2024-06-06","2025-06-19","","Tenable","Partner","https://www.tenable.com/support/technical-support","","domains","TenableIE","Tenable","Tenable Identity Exposure","Tenable Identity Exposure connector allows Indicators of Exposure, Indicators of Attack and trailflow logs to be ingested into Microsoft Sentinel.The different work books and data parsers allow you to more easily manipulate logs and monitor your Active Directory environment. 
The analytic templates allow you to automate responses regarding different events, exposures and attacks.","[{""title"": """", ""description"": "">This data connector depends on [afad_parser](https://aka.ms/sentinel-TenableApp-afad-parser) based on a Kusto Function to work as expected which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Configure the Syslog server"", ""description"": ""You will first need a **linux Syslog** server that TenableIE will send logs to. Typically you can run **rsyslog** on **Ubuntu**.\n You can then configure this server as you wish, but it is recommended to be able to output TenableIE logs in a separate file.\n\nConfigure rsyslog to accept logs from your TenableIE IP address.:\n\n```shell\nsudo -i\n\n# Set TenableIE source IP address\nexport TENABLE_IE_IP={Enter your IP address}\n\n# Create rsyslog configuration file\ncat > /etc/rsyslog.d/80-tenable.conf << EOF\n\\$ModLoad imudp\n\\$UDPServerRun 514\n\\$ModLoad imtcp\n\\$InputTCPServerRun 514\n\\$AllowedSender TCP, 127.0.0.1, $TENABLE_IE_IP\n\\$AllowedSender UDP, 127.0.0.1, $TENABLE_IE_IP\n\\$template MsgTemplate,\""%TIMESTAMP:::date-rfc3339% %HOSTNAME% %programname%[%procid%]:%msg%\\n\""\n\\$template remote-incoming-logs, \""/var/log/%PROGRAMNAME%.log\""\n*.* ?remote-incoming-logs;MsgTemplate\nEOF\n\n# Restart rsyslog\nsystemctl restart rsyslog\n```""}, {""title"": ""2. 
Install and onboard the Microsoft agent for Linux"", ""description"": ""The OMS agent will receive the TenableIE syslog events and publish it in Microsoft Sentinel :"", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""3. Check agent logs on the Syslog server"", ""description"": ""```shell\ntail -f /var/opt/microsoft/omsagent/log/omsagent.log\n```""}, {""title"": ""4. Configure TenableIE to send logs to your Syslog server"", ""description"": ""On your **TenableIE** portal, go to *System*, *Configuration* and then *Syslog*.\nFrom there you can create a new Syslog alert toward your Syslog server.\n\nOnce this is done, check that the logs are correctly gathered on your server in a separate file (to do this, you can use the *Test the configuration* button in the Syslog alert configuration in TenableIE).\nIf you used the Quickstart template, the Syslog server will by default listen on port 514 in UDP and 1514 in TCP, without TLS.""}, {""title"": ""5. Configure the custom logs"", ""description"": ""Configure the agent to collect the logs.\n\n1. In Microsoft Sentinel, go to **Configuration** -> **Settings** -> **Workspace settings** -> **Custom logs**.\n2. Click **Add custom log**.\n3. Upload a sample TenableIE.log Syslog file from the **Linux** machine running the **Syslog** server and click **Next**\n4. 
Set the record delimiter to **New Line** if not already the case and click **Next**.\n5. Select **Linux** and enter the file path to the **Syslog** file, click **+** then **Next**. The default location of the file is `/var/log/TenableIE.log` if you have a Tenable version <3.1.0, you must also add this linux file location `/var/log/AlsidForAD.log`.\n6. Set the **Name** to *Tenable_IE_CL* (Azure automatically adds *_CL* at the end of the name, there must be only one, make sure the name is not *Tenable_IE_CL_CL*).\n7. Click **Next**, you will see a resume, then click **Create**\n"", ""instructions"": []}, {""title"": ""6. Enjoy !"", ""description"": ""> You should now be able to receive logs in the *Tenable_IE_CL* table, logs data can be parse using the **afad_parser()** function, used by all query samples, workbooks and analytic templates.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Access to TenableIE Configuration"", ""description"": ""Permissions to configure syslog alerting engine""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App/Data%20Connectors/TenableIE/TenableIE.json","true" -"Tenable_VM_Asset_CL","Tenable App","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App","tenable","tenable-sentinel-integration","2024-06-06","2025-06-19","","Tenable","Partner","https://www.tenable.com/support/technical-support","","domains","TenableVM","Tenable","Tenable Vulnerability Management","The TVM data connector provides the ability to ingest Asset, Vulnerability, Compliance, WAS assets and WAS vulnerabilities data into Microsoft Sentinel using TVM REST APIs. Refer to [API documentation](https://developer.tenable.com/reference) for more information. The connector provides the ability to get data which helps to examine potential security risks, get insight into your computing assets, diagnose configuration problems and more","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Durable Functions to connect to the TenableVM API to pull [assets](https://developer.tenable.com/reference#exports-assets-download-chunk), [vulnerabilities](https://developer.tenable.com/reference#exports-vulns-request-export) and [compliance](https://developer.tenable.com/reference#exports-compliance-request-export)(if selected) at a regular interval into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a [**TenableVM parser for vulnerabilities**](https://aka.ms/sentinel-TenableApp-TenableVMVulnerabilities-parser) and a [**TenableVM parser for assets**](https://aka.ms/sentinel-TenableApp-TenableVMAssets-parser) based on a Kusto Function to work as expected which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for TenableVM**\n\n [Follow the instructions](https://docs.tenable.com/vulnerability-management/Content/Settings/my-account/GenerateAPIKey.htm) to obtain the required API credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TenableVM Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TenableVM Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TenableVM Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function App**\n\n""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TenableVM Vulnerability Management Report data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TenableVM-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-TenableVM-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group**, **FunctionApp Name** and **Location**. \n3. Enter the below information : \n\n\t a. **WorkspaceName** - Enter the Workspace Name of the log analytics Workspace. \n\n\t b. **TenableAccessKey** - Enter Access key for using the Tenable API. \n\n\t c. **TenableSecretKey** - Enter Tenable Secret Key for Authentication. \n\n\t d. **AzureClientID** - Enter Azure Client ID. \n\n\t e. **AzureClientSecret** - Enter Azure Client Secret. \n\n\t f. **TenantID** - Enter Tenant ID got from above steps. \n\n\t g. **AzureEntraObjectId** - Enter Azure Object ID got from above steps. \n\n\t h. **LowestSeveritytoStore** - Lowest vulnerability severity to store. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t i. **ComplianceDataIngestion** - Select true if you want to enable Compliance data ingestion from Tenable VM. Default is false. \n\n\t j. **WASAssetDataIngestion** - Select true if you want to enable WAS Asset data ingestion from Tenable VM. Default is false. \n\n\t k. **WASVulnerabilityDataIngestion** - Select true if you want to enable WAS Vulnerability data ingestion from Tenable VM. Default is false. \n\n\t l. 
**LowestSeveritytoStoreWAS** - The Lowest Vulnerability severity to store for WAS. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t m. **TenableExportScheduleInMinutes** - Schedule in minutes to create new export job from Tenable VM. Default is 1440. \n\n\t n. **AssetTableName** - Enter name of the table used to store Asset Data logs. \n\n\t o. **VulnTableName** - Enter name of the table used to store Vulnerability Data logs. \n\n\t p. **ComplianceTableName** - Enter name of the table used to store Compliance Data logs. \n\n\t q. **WASAssetTableName** - Enter name of the table used to store WAS Asset Data logs. \n\n\t r. **WASVulnTableName** - Enter name of the table used to store WAS Vulnerability Data logs. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TenableVM Vulnerability Management Report data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TenableVMAzureSentinelConnector310Updated-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. 
Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. TenableVMXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\n\t a. **WorkspaceName** - Enter the Workspace Name of the log analytics Workspace. \n\n\t b. **TenableAccessKey** - Enter Access key for using the Tenable API. \n\n\t c. **TenableSecretKey** - Enter Tenable Secret Key for Authentication. \n\n\t d. **AzureClientID** - Enter Azure Client ID. \n\n\t e. **AzureClientSecret** - Enter Azure Client Secret. \n\n\t f. 
**TenantID** - Enter Tenant ID got from above steps. \n\n\t g. **AzureEntraObjectId** - Enter Azure Object ID got from above steps. \n\n\t h. **LowestSeveritytoStore** - Lowest vulnerability severity to store. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t i. **ComplianceDataIngestion** - Select true if you want to enable Compliance data ingestion from Tenable VM. Default is false. \n\n\t j. **WASAssetDataIngestion** - Select true if you want to enable WAS Asset data ingestion from Tenable VM. Default is false. \n\n\t k. **WASVulnerabilityDataIngestion** - Select true if you want to enable WAS Vulnerability data ingestion from Tenable VM. Default is false. \n\n\t l. **LowestSeveritytoStoreWAS** - The Lowest Vulnerability severity to store for WAS. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t m. **TenableExportScheduleInMinutes** - Schedule in minutes to create new export job from Tenable VM. Default is 1440. \n\n\t n. **AssetTableName** - Enter name of the table used to store Asset Data logs. \n\n\t o. **VulnTableName** - Enter name of the table used to store Vulnerability Data logs. \n\n\t p. **ComplianceTableName** - Enter name of the table used to store Compliance Data logs. \n\n\t q. **WASAssetTableName** - Enter name of the table used to store WAS Asset Data logs. \n\n\t r. **WASVulnTableName** - Enter name of the table used to store WAS Vulnerability Data logs. \n\n\t s. **PyTenableUAVendor** - Value must be set to **Microsoft**. \n\n\t t. **PyTenableUAProduct** - Value must be set to **Azure Sentinel**. \n\n\t u. **PyTenableUABuild** - Value must be set to **0.0.1**.\n3. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""Both a **TenableAccessKey** and a **TenableSecretKey** is required to access the Tenable REST API. [See the documentation to learn more about API](https://developer.tenable.com/reference#vulnerability-management). 
Check all [requirements and follow the instructions](https://docs.tenable.com/vulnerability-management/Content/Settings/my-account/GenerateAPIKey.htm) for obtaining credentials.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App/Data%20Connectors/TenableVM/TenableVM.json","true" -"Tenable_VM_Compliance_CL","Tenable App","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App","tenable","tenable-sentinel-integration","2024-06-06","2025-06-19","","Tenable","Partner","https://www.tenable.com/support/technical-support","","domains","TenableVM","Tenable","Tenable Vulnerability Management","The TVM data connector provides the ability to ingest Asset, Vulnerability, Compliance, WAS assets and WAS vulnerabilities data into Microsoft Sentinel using TVM REST APIs. Refer to [API documentation](https://developer.tenable.com/reference) for more information. The connector provides the ability to get data which helps to examine potential security risks, get insight into your computing assets, diagnose configuration problems and more","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Durable Functions to connect to the TenableVM API to pull [assets](https://developer.tenable.com/reference#exports-assets-download-chunk), [vulnerabilities](https://developer.tenable.com/reference#exports-vulns-request-export) and [compliance](https://developer.tenable.com/reference#exports-compliance-request-export)(if selected) at a regular interval into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a [**TenableVM parser for vulnerabilities**](https://aka.ms/sentinel-TenableApp-TenableVMVulnerabilities-parser) and a [**TenableVM parser for assets**](https://aka.ms/sentinel-TenableApp-TenableVMAssets-parser) based on a Kusto Function to work as expected which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for TenableVM**\n\n [Follow the instructions](https://docs.tenable.com/vulnerability-management/Content/Settings/my-account/GenerateAPIKey.htm) to obtain the required API credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TenableVM Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TenableVM Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TenableVM Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function App**\n\n""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TenableVM Vulnerability Management Report data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TenableVM-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-TenableVM-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group**, **FunctionApp Name** and **Location**. \n3. Enter the below information : \n\n\t a. **WorkspaceName** - Enter the Workspace Name of the log analytics Workspace. \n\n\t b. **TenableAccessKey** - Enter Access key for using the Tenable API. \n\n\t c. **TenableSecretKey** - Enter Tenable Secret Key for Authentication. \n\n\t d. **AzureClientID** - Enter Azure Client ID. \n\n\t e. **AzureClientSecret** - Enter Azure Client Secret. \n\n\t f. **TenantID** - Enter Tenant ID got from above steps. \n\n\t g. **AzureEntraObjectId** - Enter Azure Object ID got from above steps. \n\n\t h. **LowestSeveritytoStore** - Lowest vulnerability severity to store. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t i. **ComplianceDataIngestion** - Select true if you want to enable Compliance data ingestion from Tenable VM. Default is false. \n\n\t j. **WASAssetDataIngestion** - Select true if you want to enable WAS Asset data ingestion from Tenable VM. Default is false. \n\n\t k. **WASVulnerabilityDataIngestion** - Select true if you want to enable WAS Vulnerability data ingestion from Tenable VM. Default is false. \n\n\t l. 
**LowestSeveritytoStoreWAS** - The Lowest Vulnerability severity to store for WAS. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t m. **TenableExportScheduleInMinutes** - Schedule in minutes to create new export job from Tenable VM. Default is 1440. \n\n\t n. **AssetTableName** - Enter name of the table used to store Asset Data logs. \n\n\t o. **VulnTableName** - Enter name of the table used to store Vulnerability Data logs. \n\n\t p. **ComplianceTableName** - Enter name of the table used to store Compliance Data logs. \n\n\t q. **WASAssetTableName** - Enter name of the table used to store WAS Asset Data logs. \n\n\t r. **WASVulnTableName** - Enter name of the table used to store WAS Vulnerability Data logs. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TenableVM Vulnerability Management Report data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TenableVMAzureSentinelConnector310Updated-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. 
Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. TenableVMXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\n\t a. **WorkspaceName** - Enter the Workspace Name of the log analytics Workspace. \n\n\t b. **TenableAccessKey** - Enter Access key for using the Tenable API. \n\n\t c. **TenableSecretKey** - Enter Tenable Secret Key for Authentication. \n\n\t d. **AzureClientID** - Enter Azure Client ID. \n\n\t e. **AzureClientSecret** - Enter Azure Client Secret. \n\n\t f. 
**TenantID** - Enter Tenant ID got from above steps. \n\n\t g. **AzureEntraObjectId** - Enter Azure Object ID got from above steps. \n\n\t h. **LowestSeveritytoStore** - Lowest vulnerability severity to store. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t i. **ComplianceDataIngestion** - Select true if you want to enable Compliance data ingestion from Tenable VM. Default is false. \n\n\t j. **WASAssetDataIngestion** - Select true if you want to enable WAS Asset data ingestion from Tenable VM. Default is false. \n\n\t k. **WASVulnerabilityDataIngestion** - Select true if you want to enable WAS Vulnerability data ingestion from Tenable VM. Default is false. \n\n\t l. **LowestSeveritytoStoreWAS** - The Lowest Vulnerability severity to store for WAS. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t m. **TenableExportScheduleInMinutes** - Schedule in minutes to create new export job from Tenable VM. Default is 1440. \n\n\t n. **AssetTableName** - Enter name of the table used to store Asset Data logs. \n\n\t o. **VulnTableName** - Enter name of the table used to store Vulnerability Data logs. \n\n\t p. **ComplianceTableName** - Enter name of the table used to store Compliance Data logs. \n\n\t q. **WASAssetTableName** - Enter name of the table used to store WAS Asset Data logs. \n\n\t r. **WASVulnTableName** - Enter name of the table used to store WAS Vulnerability Data logs. \n\n\t s. **PyTenableUAVendor** - Value must be set to **Microsoft**. \n\n\t t. **PyTenableUAProduct** - Value must be set to **Azure Sentinel**. \n\n\t u. **PyTenableUABuild** - Value must be set to **0.0.1**.\n3. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""Both a **TenableAccessKey** and a **TenableSecretKey** is required to access the Tenable REST API. [See the documentation to learn more about API](https://developer.tenable.com/reference#vulnerability-management). 
Check all [requirements and follow the instructions](https://docs.tenable.com/vulnerability-management/Content/Settings/my-account/GenerateAPIKey.htm) for obtaining credentials.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App/Data%20Connectors/TenableVM/TenableVM.json","true" -"Tenable_VM_Vuln_CL","Tenable App","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App","tenable","tenable-sentinel-integration","2024-06-06","2025-06-19","","Tenable","Partner","https://www.tenable.com/support/technical-support","","domains","TenableVM","Tenable","Tenable Vulnerability Management","The TVM data connector provides the ability to ingest Asset, Vulnerability, Compliance, WAS assets and WAS vulnerabilities data into Microsoft Sentinel using TVM REST APIs. Refer to [API documentation](https://developer.tenable.com/reference) for more information. The connector provides the ability to get data which helps to examine potential security risks, get insight into your computing assets, diagnose configuration problems and more","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Durable Functions to connect to the TenableVM API to pull [assets](https://developer.tenable.com/reference#exports-assets-download-chunk), [vulnerabilities](https://developer.tenable.com/reference#exports-vulns-request-export) and [compliance](https://developer.tenable.com/reference#exports-compliance-request-export)(if selected) at a regular interval into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a [**TenableVM parser for vulnerabilities**](https://aka.ms/sentinel-TenableApp-TenableVMVulnerabilities-parser) and a [**TenableVM parser for assets**](https://aka.ms/sentinel-TenableApp-TenableVMAssets-parser) based on a Kusto Function to work as expected which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for TenableVM**\n\n [Follow the instructions](https://docs.tenable.com/vulnerability-management/Content/Settings/my-account/GenerateAPIKey.htm) to obtain the required API credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TenableVM Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TenableVM Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TenableVM Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function App**\n\n""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TenableVM Vulnerability Management Report data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TenableVM-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-TenableVM-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group**, **FunctionApp Name** and **Location**. \n3. Enter the below information : \n\n\t a. **WorkspaceName** - Enter the Workspace Name of the log analytics Workspace. \n\n\t b. **TenableAccessKey** - Enter Access key for using the Tenable API. \n\n\t c. **TenableSecretKey** - Enter Tenable Secret Key for Authentication. \n\n\t d. **AzureClientID** - Enter Azure Client ID. \n\n\t e. **AzureClientSecret** - Enter Azure Client Secret. \n\n\t f. **TenantID** - Enter Tenant ID got from above steps. \n\n\t g. **AzureEntraObjectId** - Enter Azure Object ID got from above steps. \n\n\t h. **LowestSeveritytoStore** - Lowest vulnerability severity to store. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t i. **ComplianceDataIngestion** - Select true if you want to enable Compliance data ingestion from Tenable VM. Default is false. \n\n\t j. **WASAssetDataIngestion** - Select true if you want to enable WAS Asset data ingestion from Tenable VM. Default is false. \n\n\t k. **WASVulnerabilityDataIngestion** - Select true if you want to enable WAS Vulnerability data ingestion from Tenable VM. Default is false. \n\n\t l. 
**LowestSeveritytoStoreWAS** - The Lowest Vulnerability severity to store for WAS. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t m. **TenableExportScheduleInMinutes** - Schedule in minutes to create new export job from Tenable VM. Default is 1440. \n\n\t n. **AssetTableName** - Enter name of the table used to store Asset Data logs. \n\n\t o. **VulnTableName** - Enter name of the table used to store Vulnerability Data logs. \n\n\t p. **ComplianceTableName** - Enter name of the table used to store Compliance Data logs. \n\n\t q. **WASAssetTableName** - Enter name of the table used to store WAS Asset Data logs. \n\n\t r. **WASVulnTableName** - Enter name of the table used to store WAS Vulnerability Data logs. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TenableVM Vulnerability Management Report data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TenableVMAzureSentinelConnector310Updated-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. 
Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. TenableVMXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\n\t a. **WorkspaceName** - Enter the Workspace Name of the log analytics Workspace. \n\n\t b. **TenableAccessKey** - Enter Access key for using the Tenable API. \n\n\t c. **TenableSecretKey** - Enter Tenable Secret Key for Authentication. \n\n\t d. **AzureClientID** - Enter Azure Client ID. \n\n\t e. **AzureClientSecret** - Enter Azure Client Secret. \n\n\t f. 
**TenantID** - Enter Tenant ID got from above steps. \n\n\t g. **AzureEntraObjectId** - Enter Azure Object ID got from above steps. \n\n\t h. **LowestSeveritytoStore** - Lowest vulnerability severity to store. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t i. **ComplianceDataIngestion** - Select true if you want to enable Compliance data ingestion from Tenable VM. Default is false. \n\n\t j. **WASAssetDataIngestion** - Select true if you want to enable WAS Asset data ingestion from Tenable VM. Default is false. \n\n\t k. **WASVulnerabilityDataIngestion** - Select true if you want to enable WAS Vulnerability data ingestion from Tenable VM. Default is false. \n\n\t l. **LowestSeveritytoStoreWAS** - The Lowest Vulnerability severity to store for WAS. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t m. **TenableExportScheduleInMinutes** - Schedule in minutes to create new export job from Tenable VM. Default is 1440. \n\n\t n. **AssetTableName** - Enter name of the table used to store Asset Data logs. \n\n\t o. **VulnTableName** - Enter name of the table used to store Vulnerability Data logs. \n\n\t p. **ComplianceTableName** - Enter name of the table used to store Compliance Data logs. \n\n\t q. **WASAssetTableName** - Enter name of the table used to store WAS Asset Data logs. \n\n\t r. **WASVulnTableName** - Enter name of the table used to store WAS Vulnerability Data logs. \n\n\t s. **PyTenableUAVendor** - Value must be set to **Microsoft**. \n\n\t t. **PyTenableUAProduct** - Value must be set to **Azure Sentinel**. \n\n\t u. **PyTenableUABuild** - Value must be set to **0.0.1**.\n3. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""Both a **TenableAccessKey** and a **TenableSecretKey** is required to access the Tenable REST API. [See the documentation to learn more about API](https://developer.tenable.com/reference#vulnerability-management). 
Check all [requirements and follow the instructions](https://docs.tenable.com/vulnerability-management/Content/Settings/my-account/GenerateAPIKey.htm) for obtaining credentials.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App/Data%20Connectors/TenableVM/TenableVM.json","true" -"Tenable_WAS_Asset_CL","Tenable App","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App","tenable","tenable-sentinel-integration","2024-06-06","2025-06-19","","Tenable","Partner","https://www.tenable.com/support/technical-support","","domains","TenableVM","Tenable","Tenable Vulnerability Management","The TVM data connector provides the ability to ingest Asset, Vulnerability, Compliance, WAS assets and WAS vulnerabilities data into Microsoft Sentinel using TVM REST APIs. Refer to [API documentation](https://developer.tenable.com/reference) for more information. The connector provides the ability to get data which helps to examine potential security risks, get insight into your computing assets, diagnose configuration problems and more","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Durable Functions to connect to the TenableVM API to pull [assets](https://developer.tenable.com/reference#exports-assets-download-chunk), [vulnerabilities](https://developer.tenable.com/reference#exports-vulns-request-export) and [compliance](https://developer.tenable.com/reference#exports-compliance-request-export)(if selected) at a regular interval into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a [**TenableVM parser for vulnerabilities**](https://aka.ms/sentinel-TenableApp-TenableVMVulnerabilities-parser) and a [**TenableVM parser for assets**](https://aka.ms/sentinel-TenableApp-TenableVMAssets-parser) based on a Kusto Function to work as expected which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for TenableVM**\n\n [Follow the instructions](https://docs.tenable.com/vulnerability-management/Content/Settings/my-account/GenerateAPIKey.htm) to obtain the required API credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TenableVM Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TenableVM Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TenableVM Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function App**\n\n""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TenableVM Vulnerability Management Report data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TenableVM-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-TenableVM-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group**, **FunctionApp Name** and **Location**. \n3. Enter the below information : \n\n\t a. **WorkspaceName** - Enter the Workspace Name of the log analytics Workspace. \n\n\t b. **TenableAccessKey** - Enter Access key for using the Tenable API. \n\n\t c. **TenableSecretKey** - Enter Tenable Secret Key for Authentication. \n\n\t d. **AzureClientID** - Enter Azure Client ID. \n\n\t e. **AzureClientSecret** - Enter Azure Client Secret. \n\n\t f. **TenantID** - Enter Tenant ID got from above steps. \n\n\t g. **AzureEntraObjectId** - Enter Azure Object ID got from above steps. \n\n\t h. **LowestSeveritytoStore** - Lowest vulnerability severity to store. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t i. **ComplianceDataIngestion** - Select true if you want to enable Compliance data ingestion from Tenable VM. Default is false. \n\n\t j. **WASAssetDataIngestion** - Select true if you want to enable WAS Asset data ingestion from Tenable VM. Default is false. \n\n\t k. **WASVulnerabilityDataIngestion** - Select true if you want to enable WAS Vulnerability data ingestion from Tenable VM. Default is false. \n\n\t l. 
**LowestSeveritytoStoreWAS** - The Lowest Vulnerability severity to store for WAS. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t m. **TenableExportScheduleInMinutes** - Schedule in minutes to create new export job from Tenable VM. Default is 1440. \n\n\t n. **AssetTableName** - Enter name of the table used to store Asset Data logs. \n\n\t o. **VulnTableName** - Enter name of the table used to store Vulnerability Data logs. \n\n\t p. **ComplianceTableName** - Enter name of the table used to store Compliance Data logs. \n\n\t q. **WASAssetTableName** - Enter name of the table used to store WAS Asset Data logs. \n\n\t r. **WASVulnTableName** - Enter name of the table used to store WAS Vulnerability Data logs. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TenableVM Vulnerability Management Report data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TenableVMAzureSentinelConnector310Updated-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. 
Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. TenableVMXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\n\t a. **WorkspaceName** - Enter the Workspace Name of the log analytics Workspace. \n\n\t b. **TenableAccessKey** - Enter Access key for using the Tenable API. \n\n\t c. **TenableSecretKey** - Enter Tenable Secret Key for Authentication. \n\n\t d. **AzureClientID** - Enter Azure Client ID. \n\n\t e. **AzureClientSecret** - Enter Azure Client Secret. \n\n\t f. 
**TenantID** - Enter Tenant ID got from above steps. \n\n\t g. **AzureEntraObjectId** - Enter Azure Object ID got from above steps. \n\n\t h. **LowestSeveritytoStore** - Lowest vulnerability severity to store. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t i. **ComplianceDataIngestion** - Select true if you want to enable Compliance data ingestion from Tenable VM. Default is false. \n\n\t j. **WASAssetDataIngestion** - Select true if you want to enable WAS Asset data ingestion from Tenable VM. Default is false. \n\n\t k. **WASVulnerabilityDataIngestion** - Select true if you want to enable WAS Vulnerability data ingestion from Tenable VM. Default is false. \n\n\t l. **LowestSeveritytoStoreWAS** - The Lowest Vulnerability severity to store for WAS. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t m. **TenableExportScheduleInMinutes** - Schedule in minutes to create new export job from Tenable VM. Default is 1440. \n\n\t n. **AssetTableName** - Enter name of the table used to store Asset Data logs. \n\n\t o. **VulnTableName** - Enter name of the table used to store Vulnerability Data logs. \n\n\t p. **ComplianceTableName** - Enter name of the table used to store Compliance Data logs. \n\n\t q. **WASAssetTableName** - Enter name of the table used to store WAS Asset Data logs. \n\n\t r. **WASVulnTableName** - Enter name of the table used to store WAS Vulnerability Data logs. \n\n\t s. **PyTenableUAVendor** - Value must be set to **Microsoft**. \n\n\t t. **PyTenableUAProduct** - Value must be set to **Azure Sentinel**. \n\n\t u. **PyTenableUABuild** - Value must be set to **0.0.1**.\n3. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""Both a **TenableAccessKey** and a **TenableSecretKey** is required to access the Tenable REST API. [See the documentation to learn more about API](https://developer.tenable.com/reference#vulnerability-management). 
Check all [requirements and follow the instructions](https://docs.tenable.com/vulnerability-management/Content/Settings/my-account/GenerateAPIKey.htm) for obtaining credentials.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App/Data%20Connectors/TenableVM/TenableVM.json","true" -"Tenable_WAS_Vuln_CL","Tenable App","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App","tenable","tenable-sentinel-integration","2024-06-06","2025-06-19","","Tenable","Partner","https://www.tenable.com/support/technical-support","","domains","TenableVM","Tenable","Tenable Vulnerability Management","The TVM data connector provides the ability to ingest Asset, Vulnerability, Compliance, WAS assets and WAS vulnerabilities data into Microsoft Sentinel using TVM REST APIs. Refer to [API documentation](https://developer.tenable.com/reference) for more information. The connector provides the ability to get data which helps to examine potential security risks, get insight into your computing assets, diagnose configuration problems and more","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Durable Functions to connect to the TenableVM API to pull [assets](https://developer.tenable.com/reference#exports-assets-download-chunk), [vulnerabilities](https://developer.tenable.com/reference#exports-vulns-request-export) and [compliance](https://developer.tenable.com/reference#exports-compliance-request-export)(if selected) at a regular interval into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a [**TenableVM parser for vulnerabilities**](https://aka.ms/sentinel-TenableApp-TenableVMVulnerabilities-parser) and a [**TenableVM parser for assets**](https://aka.ms/sentinel-TenableApp-TenableVMAssets-parser) based on a Kusto Function to work as expected which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for TenableVM**\n\n [Follow the instructions](https://docs.tenable.com/vulnerability-management/Content/Settings/my-account/GenerateAPIKey.htm) to obtain the required API credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TenableVM Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TenableVM Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TenableVM Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function App**\n\n""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TenableVM Vulnerability Management Report data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TenableVM-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-TenableVM-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group**, **FunctionApp Name** and **Location**. \n3. Enter the below information : \n\n\t a. **WorkspaceName** - Enter the Workspace Name of the log analytics Workspace. \n\n\t b. **TenableAccessKey** - Enter Access key for using the Tenable API. \n\n\t c. **TenableSecretKey** - Enter Tenable Secret Key for Authentication. \n\n\t d. **AzureClientID** - Enter Azure Client ID. \n\n\t e. **AzureClientSecret** - Enter Azure Client Secret. \n\n\t f. **TenantID** - Enter Tenant ID got from above steps. \n\n\t g. **AzureEntraObjectId** - Enter Azure Object ID got from above steps. \n\n\t h. **LowestSeveritytoStore** - Lowest vulnerability severity to store. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t i. **ComplianceDataIngestion** - Select true if you want to enable Compliance data ingestion from Tenable VM. Default is false. \n\n\t j. **WASAssetDataIngestion** - Select true if you want to enable WAS Asset data ingestion from Tenable VM. Default is false. \n\n\t k. **WASVulnerabilityDataIngestion** - Select true if you want to enable WAS Vulnerability data ingestion from Tenable VM. Default is false. \n\n\t l. 
**LowestSeveritytoStoreWAS** - The Lowest Vulnerability severity to store for WAS. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t m. **TenableExportScheduleInMinutes** - Schedule in minutes to create new export job from Tenable VM. Default is 1440. \n\n\t n. **AssetTableName** - Enter name of the table used to store Asset Data logs. \n\n\t o. **VulnTableName** - Enter name of the table used to store Vulnerability Data logs. \n\n\t p. **ComplianceTableName** - Enter name of the table used to store Compliance Data logs. \n\n\t q. **WASAssetTableName** - Enter name of the table used to store WAS Asset Data logs. \n\n\t r. **WASVulnTableName** - Enter name of the table used to store WAS Vulnerability Data logs. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TenableVM Vulnerability Management Report data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TenableVMAzureSentinelConnector310Updated-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. 
Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. TenableVMXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\n\t a. **WorkspaceName** - Enter the Workspace Name of the log analytics Workspace. \n\n\t b. **TenableAccessKey** - Enter Access key for using the Tenable API. \n\n\t c. **TenableSecretKey** - Enter Tenable Secret Key for Authentication. \n\n\t d. **AzureClientID** - Enter Azure Client ID. \n\n\t e. **AzureClientSecret** - Enter Azure Client Secret. \n\n\t f. 
**TenantID** - Enter Tenant ID got from above steps. \n\n\t g. **AzureEntraObjectId** - Enter Azure Object ID got from above steps. \n\n\t h. **LowestSeveritytoStore** - Lowest vulnerability severity to store. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t i. **ComplianceDataIngestion** - Select true if you want to enable Compliance data ingestion from Tenable VM. Default is false. \n\n\t j. **WASAssetDataIngestion** - Select true if you want to enable WAS Asset data ingestion from Tenable VM. Default is false. \n\n\t k. **WASVulnerabilityDataIngestion** - Select true if you want to enable WAS Vulnerability data ingestion from Tenable VM. Default is false. \n\n\t l. **LowestSeveritytoStoreWAS** - The Lowest Vulnerability severity to store for WAS. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t m. **TenableExportScheduleInMinutes** - Schedule in minutes to create new export job from Tenable VM. Default is 1440. \n\n\t n. **AssetTableName** - Enter name of the table used to store Asset Data logs. \n\n\t o. **VulnTableName** - Enter name of the table used to store Vulnerability Data logs. \n\n\t p. **ComplianceTableName** - Enter name of the table used to store Compliance Data logs. \n\n\t q. **WASAssetTableName** - Enter name of the table used to store WAS Asset Data logs. \n\n\t r. **WASVulnTableName** - Enter name of the table used to store WAS Vulnerability Data logs. \n\n\t s. **PyTenableUAVendor** - Value must be set to **Microsoft**. \n\n\t t. **PyTenableUAProduct** - Value must be set to **Azure Sentinel**. \n\n\t u. **PyTenableUABuild** - Value must be set to **0.0.1**.\n3. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""Both a **TenableAccessKey** and a **TenableSecretKey** is required to access the Tenable REST API. [See the documentation to learn more about API](https://developer.tenable.com/reference#vulnerability-management). 
Check all [requirements and follow the instructions](https://docs.tenable.com/vulnerability-management/Content/Settings/my-account/GenerateAPIKey.htm) for obtaining credentials.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App/Data%20Connectors/TenableVM/TenableVM.json","true" -"","TenableAD","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TenableAD","","","","","","","","","","","","","","","","","false","","false" -"Tenable_ad_CL","TenableAD","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TenableAD","","","","","","","","","","","Tenable.ad","Tenable","Tenable.ad","Tenable.ad connector allows to export Tenable.ad Indicators of Exposures, trailflow and Indicators of Attacks logs to Azure Sentinel in real time.
It provides a data parser to manipulate the logs more easily. The different workbooks ease your Active Directory monitoring and provide different ways to visualize the data. The analytic templates allow to automate responses regarding different events, exposures, or attacks.","[{""title"": """", ""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://raw.githubusercontent.com/tenable/Azure-Sentinel/Tenable.ad-connector/Solutions/TenableAD/Parsers/afad_parser.kql) to create the Kusto Functions alias, **afad_parser**"", ""instructions"": []}, {""title"": ""1. Configure the Syslog server"", ""description"": ""You will first need a **linux Syslog** server that Tenable.ad will send logs to. Typically you can run **rsyslog** on **Ubuntu**.\n You can then configure this server as you wish, but it is recommended to be able to output Tenable.ad logs in a separate file.\n\nConfigure rsyslog to accept logs from your Tenable.ad IP address.:\n\n```shell\nsudo -i\n\n# Set Tenable.ad source IP address\nexport TENABLE_AD_IP={Enter your IP address}\n\n# Create rsyslog configuration file\ncat > /etc/rsyslog.d/80-tenable.conf << EOF\n\\$ModLoad imudp\n\\$UDPServerRun 514\n\\$ModLoad imtcp\n\\$InputTCPServerRun 514\n\\$AllowedSender TCP, 127.0.0.1, $TENABLE_AD_IP\n\\$AllowedSender UDP, 127.0.0.1, $TENABLE_AD_IP\n\\$template MsgTemplate,\""%TIMESTAMP:::date-rfc3339% %HOSTNAME% %programname%[%procid%]:%msg%\\n\""\n\\$template remote-incoming-logs, \""/var/log/%PROGRAMNAME%.log\""\n*.* ?remote-incoming-logs;MsgTemplate\nEOF\n\n# Restart rsyslog\nsystemctl restart rsyslog\n```""}, {""title"": ""2. 
Install and onboard the Microsoft agent for Linux"", ""description"": ""The OMS agent will receive the Tenable.ad syslog events and publish it in Sentinel :"", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""3. Check agent logs on the Syslog server"", ""description"": ""```shell\ntail -f /var/opt/microsoft/omsagent/log/omsagent.log\n```""}, {""title"": ""4. Configure Tenable.ad to send logs to your Syslog server"", ""description"": ""On your **Tenable.ad** portal, go to *System*, *Configuration* and then *Syslog*.\nFrom there you can create a new Syslog alert toward your Syslog server.\n\nOnce this is done, check that the logs are correctly gathered on your server in a separate file (to do this, you can use the *Test the configuration* button in the Syslog alert configuration in Tenable.ad).\nIf you used the Quickstart template, the Syslog server will by default listen on port 514 in UDP and 1514 in TCP, without TLS.""}, {""title"": ""5. Configure the custom logs"", ""description"": ""Configure the agent to collect the logs.\n\n1. In Sentinel, go to **Configuration** -> **Settings** -> **Workspace settings** -> **Custom logs**.\n2. Click **Add custom log**.\n3. Upload a sample Tenable.ad.log Syslog file from the **Linux** machine running the **Syslog** server and click **Next**\n4. 
Set the record delimiter to **New Line** if not already the case and click **Next**.\n5. Select **Linux** and enter the file path to the **Syslog** file, click **+** then **Next**. The default location of the file is `/var/log/Tenable.ad.log` if you have a Tenable version <3.1.0, you must also add this linux file location `/var/log/AlsidForAD.log`.\n6. Set the **Name** to *Tenable_ad_CL* (Azure automatically adds *_CL* at the end of the name, there must be only one, make sure the name is not *Tenable_ad_CL_CL*).\n7. Click **Next**, you will see a resume, then click **Create**\n"", ""instructions"": []}, {""title"": ""6. Enjoy !"", ""description"": ""> You should now be able to receive logs in the *Tenable_ad_CL* table, logs data can be parse using the **afad_parser()** function, used by all query samples, workbooks and analytic templates.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Access to Tenable.ad Configuration"", ""description"": ""Permissions to configure syslog alerting engine""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TenableAD/Data%20Connectors/Tenable.ad.json","true" -"","TenableIO","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TenableIO","tenable","tenable-sentinel-integration","2022-06-01","","","Tenable","Partner","https://www.tenable.com/support/technical-support","","domains","","","","","","","false","","false" -"Tenable_IO_Assets_CL","TenableIO","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TenableIO","tenable","tenable-sentinel-integration","2022-06-01","","","Tenable","Partner","https://www.tenable.com/support/technical-support","","domains","TenableIOAPI","Tenable","Tenable.io Vulnerability Management","The [Tenable.io](https://www.tenable.com/products/tenable-io) data connector provides the capability to ingest Asset and Vulnerability data into Microsoft Sentinel through the REST API from the Tenable.io platform (Managed in the cloud). Refer to [API documentation](https://developer.tenable.com/reference) for more information. The connector provides the ability to get data which helps to examine potential security risks, get insight into your computing assets, diagnose configuration problems and more","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Durable Functions to connect to the Tenable.io API to pull [assets](https://developer.tenable.com/reference#exports-assets-download-chunk) and [vulnerabilities](https://developer.tenable.com/reference#exports-vulns-request-export) at a regular interval into Microsoft Sentinel. 
This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a [**Tenable.io parser for vulnerabilities**](https://aka.ms/sentinel-TenableIO-TenableIOVulnerabilities-parser) and a [**Tenable.io parser for assets**](https://aka.ms/sentinel-TenableIO-TenableIOAssets-parser) based on a Kusto Function to work as expected which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for Tenable.io**\n\n [Follow the instructions](https://docs.tenable.com/tenableio/vulnerabilitymanagement/Content/Settings/GenerateAPIKey.htm) to obtain the required API credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function App**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Tenable.io Vulnerability Management Report data connector using an ARM Template.\n\n1. 
Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TenableIO-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **TenableAccessKey** and **TenableSecretKey** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Tenable.io Vulnerability Management Report data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TenableIO-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. 
The name you type is validated to make sure that it's unique in Azure Functions. (e.g. TenableIOXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tTenableAccessKey\n\t\tTenableSecretKey\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""Both a **TenableAccessKey** and a **TenableSecretKey** is required to access the Tenable REST API. [See the documentation to learn more about API](https://developer.tenable.com/reference#vulnerability-management). Check all [requirements and follow the instructions](https://docs.tenable.com/tenableio/vulnerabilitymanagement/Content/Settings/GenerateAPIKey.htm) for obtaining credentials.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TenableIO/Data%20Connectors/TenableIO.json","true" -"Tenable_IO_Vuln_CL","TenableIO","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TenableIO","tenable","tenable-sentinel-integration","2022-06-01","","","Tenable","Partner","https://www.tenable.com/support/technical-support","","domains","TenableIOAPI","Tenable","Tenable.io Vulnerability Management","The [Tenable.io](https://www.tenable.com/products/tenable-io) data connector provides the capability to ingest Asset and Vulnerability data into Microsoft Sentinel through the REST API from the Tenable.io platform (Managed in the cloud). Refer to [API documentation](https://developer.tenable.com/reference) for more information. 
The connector provides the ability to get data which helps to examine potential security risks, get insight into your computing assets, diagnose configuration problems and more","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Durable Functions to connect to the Tenable.io API to pull [assets](https://developer.tenable.com/reference#exports-assets-download-chunk) and [vulnerabilities](https://developer.tenable.com/reference#exports-vulns-request-export) at a regular interval into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a [**Tenable.io parser for vulnerabilities**](https://aka.ms/sentinel-TenableIO-TenableIOVulnerabilities-parser) and a [**Tenable.io parser for assets**](https://aka.ms/sentinel-TenableIO-TenableIOAssets-parser) based on a Kusto Function to work as expected which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for Tenable.io**\n\n [Follow the instructions](https://docs.tenable.com/tenableio/vulnerabilitymanagement/Content/Settings/GenerateAPIKey.htm) to obtain the required API credentials. 
\n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function App**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Tenable.io Vulnerability Management Report data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TenableIO-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **TenableAccessKey** and **TenableSecretKey** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Tenable.io Vulnerability Management Report data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TenableIO-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. 
Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. TenableIOXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tTenableAccessKey\n\t\tTenableSecretKey\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. 
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""Both a **TenableAccessKey** and a **TenableSecretKey** is required to access the Tenable REST API. [See the documentation to learn more about API](https://developer.tenable.com/reference#vulnerability-management). 
Check all [requirements and follow the instructions](https://docs.tenable.com/tenableio/vulnerabilitymanagement/Content/Settings/GenerateAPIKey.htm) for obtaining credentials.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TenableIO/Data%20Connectors/TenableIO.json","true" -"","TestSolution","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TestSolution","","","","","","","","","","domains","","","","","","","false","","false" -"","TheHive","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TheHive","azuresentinel","azure-sentinel-solution-thehive","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"TheHive_CL","TheHive","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TheHive","azuresentinel","azure-sentinel-solution-thehive","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","TheHiveProjectTheHive","TheHive Project","TheHive Project - TheHive","The [TheHive](http://thehive-project.org/) data connector provides the capability to ingest common TheHive events into Microsoft Sentinel through Webhooks. TheHive can notify external system of modification events (case creation, alert update, task assignment) in real time. When a change occurs in the TheHive, an HTTPS POST request with event information is sent to a callback data connector URL. Refer to [Webhooks documentation](https://docs.thehive-project.org/thehive/legacy/thehive3/admin/webhooks/) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This data connector uses Azure Functions based on HTTP Trigger for waiting POST requests with logs to pull its logs into Microsoft Sentinel. 
This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**TheHive**](https://aka.ms/sentinel-TheHive-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the TheHive**\n\n Follow the [instructions](https://docs.thehive-project.org/thehive/installation-and-configuration/configuration/webhooks/) to configure Webhooks.\n\n1. Authentication method is *Bearer Auth*.\n2. Generate the **TheHiveBearerToken** according to your password policy.\n3. 
Setup Webhook notifications in the *application.conf* file including **TheHiveBearerToken** parameter.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the TheHive data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TheHive data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TheHive-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **TheHiveBearerToken** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n6. 
After deploying open Function App page, select your app, go to the **Functions** and click **Get Function Url** copy it and follow p.7 from STEP 1.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TheHive data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TheHive-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tTheHiveBearerToken\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Webhooks Credentials/permissions"", ""description"": ""**TheHiveBearerToken**, **Callback URL** are required for working Webhooks. 
See the documentation to learn more about [configuring Webhooks](https://docs.thehive-project.org/thehive/installation-and-configuration/configuration/webhooks/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TheHive/Data%20Connectors/TheHive_Webhooks_FunctionApp.json","true" -"","Theom","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Theom","theominc1667512729960","theom_sentinel","2022-11-04","","","Theom","Partner","https://www.theom.ai","","domains","","","","","","","false","","false" -"TheomAlerts_CL","Theom","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Theom","theominc1667512729960","theom_sentinel","2022-11-04","","","Theom","Partner","https://www.theom.ai","","domains","Theom","Theom","Theom","Theom Data Connector enables organizations to connect their Theom environment to Microsoft Sentinel. This solution enables users to receive alerts on data security risks, create and enrich incidents, check statistics and trigger SOAR playbooks in Microsoft Sentinel","[{""title"": """", ""description"": ""1. In **Theom UI Console** click on **Manage -> Alerts** on the side bar.\n2. Select **Sentinel** tab.\n3. Click on **Active** button to enable the configuration.\n4. Enter `Primary` key as `Authorization Token`\n5. Enter `Endpoint URL` as `https://.ods.opinsights.azure.com/api/logs?api-version=2016-04-01`\n6. 
Click on `SAVE SETTINGS`\n"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Theom/Data%20Connectors/Theom.json","true" -"","Threat Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence","azuresentinel","azure-sentinel-solution-threatintelligence-taxii","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"ThreatIntelligenceIndicator","Threat Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence","azuresentinel","azure-sentinel-solution-threatintelligence-taxii","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","MicrosoftDefenderThreatIntelligence","Microsoft","Microsoft Defender Threat Intelligence","Microsoft Sentinel provides you the capability to import threat intelligence generated by Microsoft to enable monitoring, alerting and hunting. 
Use this data connector to import Indicators of Compromise (IOCs) from Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel. Threat indicators can include IP addresses, domains, URLs, and file hashes, etc.","[{""title"": ""Use this data connector to import Indicators of Compromise (IOCs) from Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel.""}, {""instructions"": [{""type"": ""MicrosoftThreatIntelligence"", ""parameters"": {""connectorKind"": ""MicrosoftThreatIntelligence""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence/Data%20Connectors/template_MicrosoftDefenderThreatIntelligence.json","true" -"ThreatIntelligenceIndicator","Threat Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence","azuresentinel","azure-sentinel-solution-threatintelligence-taxii","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","PremiumMicrosoftDefenderForThreatIntelligence","Microsoft","Premium Microsoft Defender Threat Intelligence","Microsoft Sentinel provides you the capability to import threat intelligence generated by Microsoft to enable monitoring, alerting and hunting. Use this data connector to import Indicators of Compromise (IOCs) from Premium Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel. Threat indicators can include IP addresses, domains, URLs, and file hashes, etc. Note: This is a paid connector. 
To use and ingest data from it, please purchase the ""MDTI API Access"" SKU from the Partner Center.","[{""title"": ""Use this data connector to import Indicators of Compromise (IOCs) from Premium Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel.""}, {""instructions"": [{""type"": ""PremiumMicrosoftDefenderForThreatIntelligence"", ""parameters"": {""connectorKind"": ""PremiumMicrosoftDefenderForThreatIntelligence""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence/Data%20Connectors/template_PremiumMicrosoftDefenderThreatIntelligence.json","true" -"CommonSecurityLog","Threat Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence","azuresentinel","azure-sentinel-solution-threatintelligence-taxii","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligence","Microsoft","Threat Intelligence Platforms","Microsoft Sentinel integrates with Microsoft Graph Security API data sources to enable monitoring, alerting, and hunting using your threat intelligence. Use this connector to send threat indicators to Microsoft Sentinel from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MindMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, and file hashes. 
For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2223729&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""You can connect your threat intelligence data sources to Microsoft Sentinel by either:"", ""description"": ""- Using an integrated Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MindMeld, MISP, and others.\n\n- Calling the Microsoft Graph Security API directly from another application.""}, {""title"": ""Follow These Steps to Connect your Threat Intelligence:"", ""description"": ""1) [Register an application](https://docs.microsoft.com/graph/auth-v2-service#1-register-your-app) in Azure Active Directory.\n\n2) [Configure permissions](https://docs.microsoft.com/graph/auth-v2-service#2-configure-permissions-for-microsoft-graph) and be sure to add the ThreatIndicators.ReadWrite.OwnedBy permission to the application.\n\n3) Ask your Azure AD tenant administrator to [grant consent](https://docs.microsoft.com/graph/auth-v2-service#3-get-administrator-consent) to the application.\n\n4) Configure your TIP or other integrated application to push indicators to Microsoft Sentinel by specifying the following:\n\n a. The application ID and secret you received when registering the app (step 1 above). \n\n b. Set \u201cMicrosoft Sentinel\u201d as the target.\n\n c. 
Set an action for each indicator - \u2018alert\u2019 is most relevant for Microsoft Sentinel use cases \n\nFor the latest list of integrated Threat Intelligence Platforms and detailed configuration instructions, see the [full documentation](https://docs.microsoft.com/azure/sentinel/import-threat-intelligence#adding-threat-indicators-to-azure-sentinel-with-the-threat-intelligence-platforms-data-connector).\n\nClick on \""Connect\"" below\n\n> Data from all regions will be sent to and stored in the workspace's region."", ""instructions"": [{""parameters"": {}, ""type"": ""ThreatIntelligence""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""tenant"": [""GlobalAdmin"", ""SecurityAdmin""]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence/Data%20Connectors/template_ThreatIntelligence.json","true" -"ThreatIntelligenceIndicator","Threat Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence","azuresentinel","azure-sentinel-solution-threatintelligence-taxii","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligence","Microsoft","Threat Intelligence Platforms","Microsoft Sentinel integrates with Microsoft Graph Security API data sources to enable monitoring, alerting, and hunting using your threat intelligence. Use this connector to send threat indicators to Microsoft Sentinel from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MindMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, and file hashes. 
For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2223729&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""You can connect your threat intelligence data sources to Microsoft Sentinel by either:"", ""description"": ""- Using an integrated Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, and others.\n\n- Calling the Microsoft Graph Security API directly from another application.""}, {""title"": ""Follow These Steps to Connect your Threat Intelligence:"", ""description"": ""1) [Register an application](https://docs.microsoft.com/graph/auth-v2-service#1-register-your-app) in Azure Active Directory.\n\n2) [Configure permissions](https://docs.microsoft.com/graph/auth-v2-service#2-configure-permissions-for-microsoft-graph) and be sure to add the ThreatIndicators.ReadWrite.OwnedBy permission to the application.\n\n3) Ask your Azure AD tenant administrator to [grant consent](https://docs.microsoft.com/graph/auth-v2-service#3-get-administrator-consent) to the application.\n\n4) Configure your TIP or other integrated application to push indicators to Microsoft Sentinel by specifying the following:\n\n a. The application ID and secret you received when registering the app (step 1 above). \n\n b. Set \u201cMicrosoft Sentinel\u201d as the target.\n\n c. 
Set an action for each indicator - \u2018alert\u2019 is most relevant for Microsoft Sentinel use cases \n\nFor the latest list of integrated Threat Intelligence Platforms and detailed configuration instructions, see the [full documentation](https://docs.microsoft.com/azure/sentinel/import-threat-intelligence#adding-threat-indicators-to-azure-sentinel-with-the-threat-intelligence-platforms-data-connector).\n\nClick on \""Connect\"" below\n\n> Data from all regions will be sent to and stored in the workspace's region."", ""instructions"": [{""parameters"": {}, ""type"": ""ThreatIntelligence""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""tenant"": [""GlobalAdmin"", ""SecurityAdmin""]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence/Data%20Connectors/template_ThreatIntelligence.json","true" -"ThreatIntelligenceIndicator","Threat Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence","azuresentinel","azure-sentinel-solution-threatintelligence-taxii","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligenceTaxii","Microsoft","Threat intelligence - TAXII","Microsoft Sentinel integrates with TAXII 2.0 and 2.1 data sources to enable monitoring, alerting, and hunting using your threat intelligence. Use this connector to send the supported STIX object types from TAXII servers to Microsoft Sentinel. Threat indicators can include IP addresses, domains, URLs, and file hashes. 
For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2224105&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Configure TAXII servers to stream STIX 2.0 or 2.1 STIX objects to Microsoft Sentinel"", ""description"": ""You can connect your TAXII servers to Microsoft Sentinel using the built-in TAXII connector. For detailed configuration instructions, see the [full documentation](https://docs.microsoft.com/azure/sentinel/import-threat-intelligence#adding-threat-indicators-to-azure-sentinel-with-the-threat-intelligence---taxii-data-connector). \n\nEnter the following information and select Add to configure your TAXII server."", ""instructions"": [{""parameters"": {}, ""type"": ""ThreatIntelligenceTaxii""}]}]","{""customs"": [{""name"": ""TAXII Server"", ""description"": ""TAXII 2.0 or TAXII 2.1 Server URI and Collection ID.""}], ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence/Data%20Connectors/template_ThreatIntelligenceTaxii.json","true" -"ThreatIntelligenceIndicator","Threat Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence","azuresentinel","azure-sentinel-solution-threatintelligence-taxii","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligenceUploadIndicatorsAPI","Microsoft","Threat Intelligence Upload API (Preview)","Microsoft Sentinel offers a data plane API to bring in threat intelligence from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, or other integrated applications. 
Threat indicators can include IP addresses, domains, URLs, file hashes and email addresses. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2269830&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""You can connect your threat intelligence data sources to Microsoft Sentinel by either: "", ""description"": ""\n>Using an integrated Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, and others. \n\n>Calling the Microsoft Sentinel data plane API directly from another application. \n - Note: The 'Status' of the connector will not appear as 'Connected' here, because the data is ingested by making an API call.""}, {""title"": ""Follow These Steps to Connect to your Threat Intelligence: "", ""description"": """"}, {""title"": ""1. Get Microsoft Entra ID Access Token"", ""description"": ""To send request to the APIs, you need to acquire Microsoft Entra ID access token. You can follow instruction in this page: https://docs.microsoft.com/azure/databricks/dev-tools/api/latest/aad/app-aad-token#get-an-azure-ad-access-token \n - Notice: Please request Microsoft Entra ID access token with scope value: \nFairfax: https://management.usgovcloudapi.net/.default \nMooncake: https://management.chinacloudapi.cn/.default ""}, {""title"": ""2. Send STIX objects to Sentinel"", ""description"": ""You can send the supported STIX object types by calling our Upload API. For more information about the API, click [here](https://learn.microsoft.com/azure/sentinel/stix-objects-api). \n\n>HTTP method: POST \n\n>Endpoint: \nFairfax: https://api.ti.sentinel.azure.us/workspaces/[WorkspaceID]/threatintelligence-stix-objects:upload?api-version=2024-02-01-preview \nMooncake: https://api.ti.sentinel.azure.cn/workspaces/[WorkspaceID]/threatintelligence-stix-objects:upload?api-version=2024-02-01-preview \n\n>WorkspaceID: the workspace that the STIX objects are uploaded to. 
\n\n\n>Header Value 1: \""Authorization\"" = \""Bearer [Microsoft Entra ID Access Token from step 1]\"" \n\n\n> Header Value 2: \""Content-Type\"" = \""application/json\"" \n \n>Body: The body is a JSON object containing an array of STIX objects.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.SecurityInsights/threatintelligence/write"", ""permissionsDisplayText"": ""write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence/Data%20Connectors/template_ThreatIntelligenceUploadIndicators_ForGov.json","true" -"ThreatIntelligenceIndicator","Threat Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence","azuresentinel","azure-sentinel-solution-threatintelligence-taxii","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligenceUploadIndicatorsAPI","Microsoft","Threat Intelligence Upload API (Preview)","Microsoft Sentinel offers a data plane API to bring in threat intelligence from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, file hashes and email addresses. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2269830&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""You can connect your threat intelligence data sources to Microsoft Sentinel by either: "", ""description"": ""\n>Using an integrated Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, and others. \n\n>Calling the Microsoft Sentinel data plane API directly from another application. 
\n - Note: The 'Status' of the connector will not appear as 'Connected' here, because the data is ingested by making an API call.""}, {""title"": ""Follow These Steps to Connect to your Threat Intelligence: "", ""description"": """"}, {""title"": ""1. Get Microsoft Entra ID Access Token"", ""description"": ""To send request to the APIs, you need to acquire Microsoft Entra ID access token. You can follow instruction in this page: https://docs.microsoft.com/azure/databricks/dev-tools/api/latest/aad/app-aad-token#get-an-azure-ad-access-token \n - Notice: Please request Microsoft Entra ID access token with scope value: https://management.azure.com/.default ""}, {""title"": ""2. Send STIX objects to Sentinel"", ""description"": ""You can send the supported STIX object types by calling our Upload API. For more information about the API, click [here](https://learn.microsoft.com/azure/sentinel/stix-objects-api). \n\n>HTTP method: POST \n\n>Endpoint: https://api.ti.sentinel.azure.com/workspaces/[WorkspaceID]/threatintelligence-stix-objects:upload?api-version=2024-02-01-preview \n\n>WorkspaceID: the workspace that the STIX objects are uploaded to. 
\n\n\n>Header Value 1: \""Authorization\"" = \""Bearer [Microsoft Entra ID Access Token from step 1]\"" \n\n\n> Header Value 2: \""Content-Type\"" = \""application/json\"" \n \n>Body: The body is a JSON object containing an array of STIX objects.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.SecurityInsights/threatintelligence/write"", ""permissionsDisplayText"": ""write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence/Data%20Connectors/template_ThreatIntelligenceUploadIndicators.json","true" -"","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"ThreatIntelIndicators","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","MicrosoftDefenderThreatIntelligence","Microsoft","Microsoft Defender Threat Intelligence","Microsoft Sentinel provides you the capability to import threat intelligence generated by Microsoft to enable monitoring, alerting and hunting. Use this data connector to import Indicators of Compromise (IOCs) from Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel. 
Threat indicators can include IP addresses, domains, URLs, and file hashes, etc.","[{""title"": ""Use this data connector to import Indicators of Compromise (IOCs) from Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel.""}, {""instructions"": [{""type"": ""MicrosoftThreatIntelligence"", ""parameters"": {""connectorKind"": ""MicrosoftThreatIntelligence""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_MicrosoftDefenderThreatIntelligence.json","true" -"ThreatIntelObjects","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","MicrosoftDefenderThreatIntelligence","Microsoft","Microsoft Defender Threat Intelligence","Microsoft Sentinel provides you the capability to import threat intelligence generated by Microsoft to enable monitoring, alerting and hunting. Use this data connector to import Indicators of Compromise (IOCs) from Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel. 
Threat indicators can include IP addresses, domains, URLs, and file hashes, etc.","[{""title"": ""Use this data connector to import Indicators of Compromise (IOCs) from Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel.""}, {""instructions"": [{""type"": ""MicrosoftThreatIntelligence"", ""parameters"": {""connectorKind"": ""MicrosoftThreatIntelligence""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_MicrosoftDefenderThreatIntelligence.json","true" -"ThreatIntelIndicators","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","PremiumMicrosoftDefenderForThreatIntelligence","Microsoft","Premium Microsoft Defender Threat Intelligence","Microsoft Sentinel provides you the capability to import threat intelligence generated by Microsoft to enable monitoring, alerting and hunting. Use this data connector to import Indicators of Compromise (IOCs) from Premium Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel. Threat indicators can include IP addresses, domains, URLs, and file hashes, etc. Note: This is a paid connector. 
To use and ingest data from it, please purchase the ""MDTI API Access"" SKU from the Partner Center.","[{""title"": ""Use this data connector to import Indicators of Compromise (IOCs) from Premium Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel.""}, {""instructions"": [{""type"": ""PremiumMicrosoftDefenderForThreatIntelligence"", ""parameters"": {""connectorKind"": ""PremiumMicrosoftDefenderForThreatIntelligence""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_PremiumMicrosoftDefenderThreatIntelligence.json","true" -"ThreatIntelObjects","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","PremiumMicrosoftDefenderForThreatIntelligence","Microsoft","Premium Microsoft Defender Threat Intelligence","Microsoft Sentinel provides you the capability to import threat intelligence generated by Microsoft to enable monitoring, alerting and hunting. Use this data connector to import Indicators of Compromise (IOCs) from Premium Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel. Threat indicators can include IP addresses, domains, URLs, and file hashes, etc. Note: This is a paid connector. 
To use and ingest data from it, please purchase the ""MDTI API Access"" SKU from the Partner Center.","[{""title"": ""Use this data connector to import Indicators of Compromise (IOCs) from Premium Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel.""}, {""instructions"": [{""type"": ""PremiumMicrosoftDefenderForThreatIntelligence"", ""parameters"": {""connectorKind"": ""PremiumMicrosoftDefenderForThreatIntelligence""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_PremiumMicrosoftDefenderThreatIntelligence.json","true" -"CommonSecurityLog","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligence","Microsoft","Threat Intelligence Platforms","Microsoft Sentinel integrates with Microsoft Graph Security API data sources to enable monitoring, alerting, and hunting using your threat intelligence. Use this connector to send threat indicators to Microsoft Sentinel from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, and file hashes. 
For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2223729&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""You can connect your threat intelligence data sources to Microsoft Sentinel by either:"", ""description"": ""- Using an integrated Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, and others.\n\n- Calling the Microsoft Graph Security API directly from another application.""}, {""title"": ""Follow These Steps to Connect your Threat Intelligence:"", ""description"": ""1) [Register an application](https://docs.microsoft.com/graph/auth-v2-service#1-register-your-app) in Azure Active Directory.\n\n2) [Configure permissions](https://docs.microsoft.com/graph/auth-v2-service#2-configure-permissions-for-microsoft-graph) and be sure to add the ThreatIndicators.ReadWrite.OwnedBy permission to the application.\n\n3) Ask your Azure AD tenant administrator to [grant consent](https://docs.microsoft.com/graph/auth-v2-service#3-get-administrator-consent) to the application.\n\n4) Configure your TIP or other integrated application to push indicators to Microsoft Sentinel by specifying the following:\n\n a. The application ID and secret you received when registering the app (step 1 above). \n\n b. Set \u201cMicrosoft Sentinel\u201d as the target.\n\n c. 
Set an action for each indicator - \u2018alert\u2019 is most relevant for Microsoft Sentinel use cases \n\nFor the latest list of integrated Threat Intelligence Platforms and detailed configuration instructions, see the [full documentation](https://docs.microsoft.com/azure/sentinel/import-threat-intelligence#adding-threat-indicators-to-azure-sentinel-with-the-threat-intelligence-platforms-data-connector).\n\nClick on \""Connect\"" below\n\n> Data from all regions will be sent to and stored in the workspace's region."", ""instructions"": [{""parameters"": {}, ""type"": ""ThreatIntelligence""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""tenant"": [""GlobalAdmin"", ""SecurityAdmin""]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligence.json","true" -"ThreatIntelIndicators","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligence","Microsoft","Threat Intelligence Platforms","Microsoft Sentinel integrates with Microsoft Graph Security API data sources to enable monitoring, alerting, and hunting using your threat intelligence. Use this connector to send threat indicators to Microsoft Sentinel from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, and file hashes. 
For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2223729&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""You can connect your threat intelligence data sources to Microsoft Sentinel by either:"", ""description"": ""- Using an integrated Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, and others.\n\n- Calling the Microsoft Graph Security API directly from another application.""}, {""title"": ""Follow These Steps to Connect your Threat Intelligence:"", ""description"": ""1) [Register an application](https://docs.microsoft.com/graph/auth-v2-service#1-register-your-app) in Azure Active Directory.\n\n2) [Configure permissions](https://docs.microsoft.com/graph/auth-v2-service#2-configure-permissions-for-microsoft-graph) and be sure to add the ThreatIndicators.ReadWrite.OwnedBy permission to the application.\n\n3) Ask your Azure AD tenant administrator to [grant consent](https://docs.microsoft.com/graph/auth-v2-service#3-get-administrator-consent) to the application.\n\n4) Configure your TIP or other integrated application to push indicators to Microsoft Sentinel by specifying the following:\n\n a. The application ID and secret you received when registering the app (step 1 above). \n\n b. Set \u201cMicrosoft Sentinel\u201d as the target.\n\n c. 
Set an action for each indicator - \u2018alert\u2019 is most relevant for Microsoft Sentinel use cases \n\nFor the latest list of integrated Threat Intelligence Platforms and detailed configuration instructions, see the [full documentation](https://docs.microsoft.com/azure/sentinel/import-threat-intelligence#adding-threat-indicators-to-azure-sentinel-with-the-threat-intelligence-platforms-data-connector).\n\nClick on \""Connect\"" below\n\n> Data from all regions will be sent to and stored in the workspace's region."", ""instructions"": [{""parameters"": {}, ""type"": ""ThreatIntelligence""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""tenant"": [""GlobalAdmin"", ""SecurityAdmin""]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligence.json","true" -"ThreatIntelObjects","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligence","Microsoft","Threat Intelligence Platforms","Microsoft Sentinel integrates with Microsoft Graph Security API data sources to enable monitoring, alerting, and hunting using your threat intelligence. Use this connector to send threat indicators to Microsoft Sentinel from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, and file hashes. 
For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2223729&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""You can connect your threat intelligence data sources to Microsoft Sentinel by either:"", ""description"": ""- Using an integrated Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, and others.\n\n- Calling the Microsoft Graph Security API directly from another application.""}, {""title"": ""Follow These Steps to Connect your Threat Intelligence:"", ""description"": ""1) [Register an application](https://docs.microsoft.com/graph/auth-v2-service#1-register-your-app) in Azure Active Directory.\n\n2) [Configure permissions](https://docs.microsoft.com/graph/auth-v2-service#2-configure-permissions-for-microsoft-graph) and be sure to add the ThreatIndicators.ReadWrite.OwnedBy permission to the application.\n\n3) Ask your Azure AD tenant administrator to [grant consent](https://docs.microsoft.com/graph/auth-v2-service#3-get-administrator-consent) to the application.\n\n4) Configure your TIP or other integrated application to push indicators to Microsoft Sentinel by specifying the following:\n\n a. The application ID and secret you received when registering the app (step 1 above). \n\n b. Set \u201cMicrosoft Sentinel\u201d as the target.\n\n c. 
Set an action for each indicator - \u2018alert\u2019 is most relevant for Microsoft Sentinel use cases \n\nFor the latest list of integrated Threat Intelligence Platforms and detailed configuration instructions, see the [full documentation](https://docs.microsoft.com/azure/sentinel/import-threat-intelligence#adding-threat-indicators-to-azure-sentinel-with-the-threat-intelligence-platforms-data-connector).\n\nClick on \""Connect\"" below\n\n> Data from all regions will be sent to and stored in the workspace's region."", ""instructions"": [{""parameters"": {}, ""type"": ""ThreatIntelligence""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""tenant"": [""GlobalAdmin"", ""SecurityAdmin""]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligence.json","true" -"ThreatIntelIndicators","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligenceTaxii","Microsoft","Threat intelligence - TAXII","Microsoft Sentinel integrates with TAXII 2.0 and 2.1 data sources to enable monitoring, alerting, and hunting using your threat intelligence. Use this connector to send the supported STIX object types from TAXII servers to Microsoft Sentinel. Threat indicators can include IP addresses, domains, URLs, and file hashes. 
For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2224105&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Configure TAXII servers to stream STIX 2.0 or 2.1 STIX objects to Microsoft Sentinel"", ""description"": ""You can connect your TAXII servers to Microsoft Sentinel using the built-in TAXII connector. For detailed configuration instructions, see the [full documentation](https://docs.microsoft.com/azure/sentinel/import-threat-intelligence#adding-threat-indicators-to-azure-sentinel-with-the-threat-intelligence---taxii-data-connector). \n\nEnter the following information and select Add to configure your TAXII server."", ""instructions"": [{""parameters"": {}, ""type"": ""ThreatIntelligenceTaxii""}]}]","{""customs"": [{""name"": ""TAXII Server"", ""description"": ""TAXII 2.0 or TAXII 2.1 Server URI and Collection ID.""}], ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligenceTaxii.json","true" -"ThreatIntelObjects","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligenceTaxii","Microsoft","Threat intelligence - TAXII","Microsoft Sentinel integrates with TAXII 2.0 and 2.1 data sources to enable monitoring, alerting, and hunting using your threat intelligence. 
Use this connector to send the supported STIX object types from TAXII servers to Microsoft Sentinel. Threat indicators can include IP addresses, domains, URLs, and file hashes. For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2224105&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Configure TAXII servers to stream STIX 2.0 or 2.1 STIX objects to Microsoft Sentinel"", ""description"": ""You can connect your TAXII servers to Microsoft Sentinel using the built-in TAXII connector. For detailed configuration instructions, see the [full documentation](https://docs.microsoft.com/azure/sentinel/import-threat-intelligence#adding-threat-indicators-to-azure-sentinel-with-the-threat-intelligence---taxii-data-connector). \n\nEnter the following information and select Add to configure your TAXII server."", ""instructions"": [{""parameters"": {}, ""type"": ""ThreatIntelligenceTaxii""}]}]","{""customs"": [{""name"": ""TAXII Server"", ""description"": ""TAXII 2.0 or TAXII 2.1 Server URI and Collection ID.""}], ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligenceTaxii.json","true" -"ThreatIntelExportOperation","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligenceTaxiiExport","Microsoft","Threat intelligence - TAXII Export (Preview)","Microsoft Sentinel 
integrates with TAXII 2.1 servers to enable exporting of your threat intelligence objects. Use this connector to send the supported STIX object types from Microsoft Sentinel to TAXII servers.","[{""title"": ""Configure TAXII servers to export STIX 2.1 objects to. Once configured, you can start exporting STIX objects from your TI repository"", ""instructions"": [{""parameters"": {""isExport"": true}, ""type"": ""ThreatIntelligenceTaxii""}]}]","{""customs"": [{""name"": ""TAXII Server"", ""description"": ""TAXII 2.1 Server URL and Collection ID.""}], ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligenceTaxiiExport.json","true" -"ThreatIntelIndicators","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligenceUploadIndicatorsAPI","Microsoft","Threat Intelligence Upload API (Preview)","Microsoft Sentinel offers a data plane API to bring in threat intelligence from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, file hashes and email addresses. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2269830&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""You can connect your threat intelligence data sources to Microsoft Sentinel by either: "", ""description"": ""\n>Using an integrated Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, and others. \n\n>Calling the Microsoft Sentinel data plane API directly from another application. \n - Note: The 'Status' of the connector will not appear as 'Connected' here, because the data is ingested by making an API call.""}, {""title"": ""Follow These Steps to Connect to your Threat Intelligence: "", ""description"": """"}, {""title"": ""1. Get Microsoft Entra ID Access Token"", ""description"": ""To send request to the APIs, you need to acquire Microsoft Entra ID access token. You can follow instruction in this page: https://docs.microsoft.com/azure/databricks/dev-tools/api/latest/aad/app-aad-token#get-an-azure-ad-access-token \n - Notice: Please request Microsoft Entra ID access token with scope value: \nFairfax: https://management.usgovcloudapi.net/.default \nMooncake: https://management.chinacloudapi.cn/.default ""}, {""title"": ""2. Send STIX objects to Sentinel"", ""description"": ""You can send the supported STIX object types by calling our Upload API. For more information about the API, click [here](https://learn.microsoft.com/azure/sentinel/stix-objects-api). \n\n>HTTP method: POST \n\n>Endpoint: \nFairfax: https://api.ti.sentinel.azure.us/workspaces/[WorkspaceID]/threatintelligence-stix-objects:upload?api-version=2024-02-01-preview \nMooncake: https://api.ti.sentinel.azure.cn/workspaces/[WorkspaceID]/threatintelligence-stix-objects:upload?api-version=2024-02-01-preview \n\n>WorkspaceID: the workspace that the STIX objects are uploaded to. 
\n\n\n>Header Value 1: \""Authorization\"" = \""Bearer [Microsoft Entra ID Access Token from step 1]\"" \n\n\n> Header Value 2: \""Content-Type\"" = \""application/json\"" \n \n>Body: The body is a JSON object containing an array of STIX objects.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.SecurityInsights/threatintelligence/write"", ""permissionsDisplayText"": ""write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligenceUploadIndicators_ForGov.json","true" -"ThreatIntelObjects","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligenceUploadIndicatorsAPI","Microsoft","Threat Intelligence Upload API (Preview)","Microsoft Sentinel offers a data plane API to bring in threat intelligence from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, file hashes and email addresses. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2269830&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""You can connect your threat intelligence data sources to Microsoft Sentinel by either: "", ""description"": ""\n>Using an integrated Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, and others. \n\n>Calling the Microsoft Sentinel data plane API directly from another application. 
\n - Note: The 'Status' of the connector will not appear as 'Connected' here, because the data is ingested by making an API call.""}, {""title"": ""Follow These Steps to Connect to your Threat Intelligence: "", ""description"": """"}, {""title"": ""1. Get Microsoft Entra ID Access Token"", ""description"": ""To send request to the APIs, you need to acquire Microsoft Entra ID access token. You can follow instruction in this page: https://docs.microsoft.com/azure/databricks/dev-tools/api/latest/aad/app-aad-token#get-an-azure-ad-access-token \n - Notice: Please request Microsoft Entra ID access token with scope value: \nFairfax: https://management.usgovcloudapi.net/.default \nMooncake: https://management.chinacloudapi.cn/.default ""}, {""title"": ""2. Send STIX objects to Sentinel"", ""description"": ""You can send the supported STIX object types by calling our Upload API. For more information about the API, click [here](https://learn.microsoft.com/azure/sentinel/stix-objects-api). \n\n>HTTP method: POST \n\n>Endpoint: \nFairfax: https://api.ti.sentinel.azure.us/workspaces/[WorkspaceID]/threatintelligence-stix-objects:upload?api-version=2024-02-01-preview \nMooncake: https://api.ti.sentinel.azure.cn/workspaces/[WorkspaceID]/threatintelligence-stix-objects:upload?api-version=2024-02-01-preview \n\n>WorkspaceID: the workspace that the STIX objects are uploaded to. 
\n\n\n>Header Value 1: \""Authorization\"" = \""Bearer [Microsoft Entra ID Access Token from step 1]\"" \n\n\n> Header Value 2: \""Content-Type\"" = \""application/json\"" \n \n>Body: The body is a JSON object containing an array of STIX objects.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.SecurityInsights/threatintelligence/write"", ""permissionsDisplayText"": ""write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligenceUploadIndicators_ForGov.json","true" -"ThreatIntelIndicators","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligenceUploadIndicatorsAPI","Microsoft","Threat Intelligence Upload API (Preview)","Microsoft Sentinel offers a data plane API to bring in threat intelligence from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, file hashes and email addresses. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2269830&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""You can connect your threat intelligence data sources to Microsoft Sentinel by either: "", ""description"": ""\n>Using an integrated Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, and others. \n\n>Calling the Microsoft Sentinel data plane API directly from another application. 
\n - Note: The 'Status' of the connector will not appear as 'Connected' here, because the data is ingested by making an API call.""}, {""title"": ""Follow These Steps to Connect to your Threat Intelligence: "", ""description"": """"}, {""title"": ""1. Get Microsoft Entra ID Access Token"", ""description"": ""To send request to the APIs, you need to acquire Microsoft Entra ID access token. You can follow instruction in this page: https://docs.microsoft.com/azure/databricks/dev-tools/api/latest/aad/app-aad-token#get-an-azure-ad-access-token \n - Notice: Please request Microsoft Entra ID access token with scope value: https://management.azure.com/.default ""}, {""title"": ""2. Send STIX objects to Sentinel"", ""description"": ""You can send the supported STIX object types by calling our Upload API. For more information about the API, click [here](https://learn.microsoft.com/azure/sentinel/stix-objects-api). \n\n>HTTP method: POST \n\n>Endpoint: https://api.ti.sentinel.azure.com/workspaces/[WorkspaceID]/threatintelligence-stix-objects:upload?api-version=2024-02-01-preview \n\n>WorkspaceID: the workspace that the STIX objects are uploaded to. 
\n\n\n>Header Value 1: \""Authorization\"" = \""Bearer [Microsoft Entra ID Access Token from step 1]\"" \n\n\n> Header Value 2: \""Content-Type\"" = \""application/json\"" \n \n>Body: The body is a JSON object containing an array of STIX objects.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.SecurityInsights/threatintelligence/write"", ""permissionsDisplayText"": ""write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligenceUploadIndicators.json","true" -"ThreatIntelObjects","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligenceUploadIndicatorsAPI","Microsoft","Threat Intelligence Upload API (Preview)","Microsoft Sentinel offers a data plane API to bring in threat intelligence from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, file hashes and email addresses. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2269830&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""You can connect your threat intelligence data sources to Microsoft Sentinel by either: "", ""description"": ""\n>Using an integrated Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, and others. \n\n>Calling the Microsoft Sentinel data plane API directly from another application. 
\n - Note: The 'Status' of the connector will not appear as 'Connected' here, because the data is ingested by making an API call.""}, {""title"": ""Follow These Steps to Connect to your Threat Intelligence: "", ""description"": """"}, {""title"": ""1. Get Microsoft Entra ID Access Token"", ""description"": ""To send request to the APIs, you need to acquire Microsoft Entra ID access token. You can follow instruction in this page: https://docs.microsoft.com/azure/databricks/dev-tools/api/latest/aad/app-aad-token#get-an-azure-ad-access-token \n - Notice: Please request Microsoft Entra ID access token with scope value: https://management.azure.com/.default ""}, {""title"": ""2. Send STIX objects to Sentinel"", ""description"": ""You can send the supported STIX object types by calling our Upload API. For more information about the API, click [here](https://learn.microsoft.com/azure/sentinel/stix-objects-api). \n\n>HTTP method: POST \n\n>Endpoint: https://api.ti.sentinel.azure.com/workspaces/[WorkspaceID]/threatintelligence-stix-objects:upload?api-version=2024-02-01-preview \n\n>WorkspaceID: the workspace that the STIX objects are uploaded to. 
\n\n\n>Header Value 1: \""Authorization\"" = \""Bearer [Microsoft Entra ID Access Token from step 1]\"" \n\n\n> Header Value 2: \""Content-Type\"" = \""application/json\"" \n \n>Body: The body is a JSON object containing an array of STIX objects.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.SecurityInsights/threatintelligence/write"", ""permissionsDisplayText"": ""write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligenceUploadIndicators.json","true" -"","Threat Intelligence Solution for Azure Government","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20Solution%20for%20Azure%20Government","azuresentinel","azure-sentinel-solution-threatintelligenceazuregov","2023-03-06","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"","ThreatAnalysis&Response","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ThreatAnalysis%26Response","azuresentinel","azure-sentinel-solution-mitreattck","2021-10-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","ThreatConnect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ThreatConnect","threatconnectinc1694630408738","sentinel-threatconnect-byol-enterprise","2023-09-11","2023-09-11","","ThreatConnect, Inc.","Partner","https://threatconnect.com/contact/","","domains","","","","","","","false","","false" -"","ThreatXCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ThreatXCloud","azuresentinel","azure-sentinel-solution-threatxwaf","2022-09-23","2022-09-23","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","Tomcat","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tomcat","azuresentinel","azure-sentinel-solution-apachetomcat","2022-01-31","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"Tomcat_CL","Tomcat","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tomcat","azuresentinel","azure-sentinel-solution-apachetomcat","2022-01-31","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ApacheTomcat","Apache","[Deprecated] Apache Tomcat","The Apache Tomcat solution provides the capability to ingest [Apache Tomcat](http://tomcat.apache.org/) events into Microsoft Sentinel. Refer to [Apache Tomcat documentation](http://tomcat.apache.org/tomcat-10.0-doc/logging.html) for more information.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias TomcatEvent and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tomcat/Parsers/TomcatEvent.txt).The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using Apache Tomcat version 10.0.4"", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Apache Tomcat Server where the logs are generated.\n\n> Logs from Apache Tomcat Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the custom log directory to be collected"", ""instructions"": [{""parameters"": {""linkType"": ""OpenCustomLogsSettings""}, ""type"": ""InstallAgent""}]}, {""title"": """", ""description"": ""1. 
Select the link above to open your workspace advanced settings \n2. From the left pane, select **Data**, select **Custom Logs** and click **Add+**\n3. Click **Browse** to upload a sample of a Tomcat log file (e.g. access.log or error.log). Then, click **Next >**\n4. Select **New line** as the record delimiter and click **Next >**\n5. Select **Windows** or **Linux** and enter the path to Tomcat logs based on your configuration. Example: \n - **Linux** Directory: '/var/log/tomcat/*.log' \n6. After entering the path, click the '+' symbol to apply, then click **Next >** \n7. Add **Tomcat_CL** as the custom log Name and click **Done**""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tomcat/Data%20Connectors/Connector_Tomcat_agent.json","true" -"","Torq","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Torq","torqtechnologiesltd2020","torq_sentinel_solution","2024-12-24","","","Torq Support Team","Partner","https://support.torq.io","","domains","","","","","","","false","","false" -"","Training","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Training","","","","","","","","","","","","","","","","","false","","false" -"","TransmitSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TransmitSecurity","transmitsecurity","microsoft-sentinel-solution-transmitsecurity","2024-06-10","2024-11-20","","Transmit Security","Partner","https://transmitsecurity.com/support","","domains","","","","","","","false","","false" -"TransmitSecurityActivity_CL","TransmitSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TransmitSecurity","transmitsecurity","microsoft-sentinel-solution-transmitsecurity","2024-06-10","2024-11-20","","Transmit Security","Partner","https://transmitsecurity.com/support","","domains","TransmitSecurity","TransmitSecurity","Transmit Security Connector","The [Transmit Security] data connector provides the capability to ingest common Transmit Security API events into Microsoft Sentinel through the REST API. [Refer to API documentation for more information](https://developer.transmitsecurity.com/). 
The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Transmit Security API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Transmit Security API**\n\nFollow the instructions to obtain the credentials.\n\n1. Log in to the Transmit Security Portal.\n2. Configure a [management app](https://developer.transmitsecurity.com/guides/user/management_apps/). Give the app a suitable name, for example, MyAzureSentinelCollector.\n3. 
Save credentials of the new user for using in the data connector.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Transmit Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Transmit Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below.\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TransmitSecurityAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-TransmitSecurityAPI-azuredeploy-gov)\n\n2. Select the preferred **Subscription**, **Resource Group**, and **Location**.\n\n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select an existing resource group without Windows apps in it or create a new resource group.\n\n3. Enter the **TransmitSecurityClientID**, **TransmitSecurityClientSecret**, **TransmitSecurityPullEndpoint**, **TransmitSecurityTokenEndpoint**, and deploy.\n\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Transmit Security data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. 
Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS Code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TransmitSecurityAPI-functionapp) file. Extract the archive to your local development computer.\n\n2. Start VS Code. Choose **File** in the main menu and select **Open Folder**.\n\n3. Select the top-level folder from the extracted files.\n\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\n\n If you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**.\n\n If you're already signed in, go to the next step.\n\n5. Provide the following information at the prompts:\n\n a. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n b. **Select Subscription:** Choose the subscription to use.\n\n c. Select **Create new Function App in Azure** (Don't choose the Advanced option).\n\n d. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions.\n\n e. **Select a runtime:** Choose Python 3.11.\n\n f. Select a location for new resources. For better performance and lower costs, choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n\n7. Go to the Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n\n2. Select **Environment variables**.\n\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\n - **TransmitSecurityClientID**\n - **TransmitSecurityClientSecret**\n - **TransmitSecurityPullEndpoint**\n - **TransmitSecurityTokenEndpoint**\n - **WorkspaceID**\n - **WorkspaceKey**\n - **logAnalyticsUri** (optional)\n\n > - Use **logAnalyticsUri** to override the log analytics API endpoint for a dedicated cloud. For example, for the public cloud, leave the value empty; for the Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n\n4. Once all application settings have been entered, click **Apply**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Client ID"", ""description"": ""**TransmitSecurityClientID** is required. 
See the documentation to learn more about API on the `https://developer.transmitsecurity.com/`.""}, {""name"": ""REST API Client Secret"", ""description"": ""**TransmitSecurityClientSecret** is required. See the documentation to learn more about API on the `https://developer.transmitsecurity.com/`.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TransmitSecurity/Data%20Connectors/TransmitSecurity_API_FunctionApp.JSON","true" -"","Trend Micro Apex One","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Apex%20One","azuresentinel","azure-sentinel-solution-trendmicroapexone","2021-07-06","2022-03-24","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"CommonSecurityLog","Trend Micro Apex One","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Apex%20One","azuresentinel","azure-sentinel-solution-trendmicroapexone","2021-07-06","2022-03-24","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","TrendMicroApexOne","Trend Micro","[Deprecated] Trend Micro Apex One via Legacy Agent","The [Trend Micro Apex One](https://www.trendmicro.com/en_us/business/products/user-protection/sps/endpoint.html) data connector provides the capability to ingest [Trend Micro Apex One events](https://aka.ms/sentinel-TrendMicroApex-OneEvents) into Microsoft Sentinel. Refer to [Trend Micro Apex Central](https://aka.ms/sentinel-TrendMicroApex-OneCentral) for more information.","[{""title"": """", ""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected [**TMApexOneEvent**](https://aka.ms/sentinel-TMApexOneEvent-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using Trend Micro Apex Central 2019"", ""instructions"": []}, {""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""[Follow these steps](https://docs.trendmicro.com/en-us/enterprise/trend-micro-apex-central-2019-online-help/detections/logs_001/syslog-forwarding.aspx) to configure Apex Central sending alerts via syslog. While configuring, on step 6, select the log format **CEF**.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Apex%20One/Data%20Connectors/TrendMicro_ApexOne.json","true" -"CommonSecurityLog","Trend Micro Apex One","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Apex%20One","azuresentinel","azure-sentinel-solution-trendmicroapexone","2021-07-06","2022-03-24","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","TrendMicroApexOneAma","Trend Micro","[Deprecated] Trend Micro Apex One via AMA","The [Trend Micro Apex One](https://www.trendmicro.com/en_us/business/products/user-protection/sps/endpoint.html) data connector provides the capability to ingest [Trend Micro Apex One events](https://aka.ms/sentinel-TrendMicroApex-OneEvents) into Microsoft Sentinel. Refer to [Trend Micro Apex Central](https://aka.ms/sentinel-TrendMicroApex-OneCentral) for more information.","[{""title"": """", ""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected [**TMApexOneEvent**](https://aka.ms/sentinel-TMApexOneEvent-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""[Follow these steps](https://docs.trendmicro.com/en-us/enterprise/trend-micro-apex-central-2019-online-help/detections/logs_001/syslog-forwarding.aspx) to configure Apex Central sending alerts via syslog. While configuring, on step 6, select the log format **CEF**."", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Apex%20One/Data%20Connectors/template_TrendMicro_ApexOneAMA.json","true" -"","Trend Micro Cloud App Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Cloud%20App%20Security","azuresentinel","azuresentinel.trendmicrocas","2021-09-28","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"TrendMicroCAS_CL","Trend Micro Cloud App Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Cloud%20App%20Security","azuresentinel","azuresentinel.trendmicrocas","2021-09-28","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","TrendMicroCAS","Trend Micro","Trend Micro Cloud App Security","The [Trend Micro Cloud App Security](https://www.trendmicro.com/en_be/business/products/user-protection/sps/email-and-collaboration/cloud-app-security.html) data connector provides the capability to retrieve security event logs of the services that Cloud App Security protects and more events into Microsoft Sentinel through the Log Retrieval API. Refer to API [documentation](https://docs.trendmicro.com/en-us/enterprise/cloud-app-security-integration-api-online-help/supported-cloud-app-/log-retrieval-api/get-security-logs.aspx) for more information. 
The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Azure Blob Storage API to pull logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**TrendMicroCAS**](https://aka.ms/sentinel-TrendMicroCAS-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Trend Micro Log Retrieval API**\n\n Follow the instructions to obtain the credentials.\n\n1. Obtain the **TrendMicroCASToken** using the [documentation](https://docs.trendmicro.com/en-us/enterprise/cloud-app-security-integration-api-online-help/getting-started-with/generating-an-authen.aspx).\n2. 
Save credentials for using in the data connector.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Trend Micro Cloud App Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Trend Micro Cloud App Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TrendMicroCAS-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **TrendMicroCASToken**, **TrendMicroCASServiceURL** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Trend Micro Cloud App Security data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-TMCASAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. TMCASXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tTrendMicroCASToken\n\t\tTrendMicroCASServiceURL\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**TrendMicroCASToken** and **TrendMicroCASServiceURL** are required for making API calls. 
See the [documentation](https://docs.trendmicro.com/en-us/enterprise/cloud-app-security-integration-api-online-help/getting-started-with/using-cloud-app-secu.aspx) to learn more about API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Cloud%20App%20Security/Data%20Connectors/TerndMicroCAS_API_FunctionApp.json","true" -"","Trend Micro Deep Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Deep%20Security","trendmicro","trend_micro_deep_security_mss","2022-05-10","","","Trend Micro","Partner","https://success.trendmicro.com/dcx/s/?language=en_US","","domains","","","","","","","false","","false" -"CommonSecurityLog","Trend Micro Deep Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Deep%20Security","trendmicro","trend_micro_deep_security_mss","2022-05-10","","","Trend Micro","Partner","https://success.trendmicro.com/dcx/s/?language=en_US","","domains","TrendMicro","Trend Micro","[Deprecated] Trend Micro Deep Security via Legacy","The Trend Micro Deep Security connector allows you to easily connect your Deep Security logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's networks/systems and improves your security operation capabilities.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Trend Micro Deep Security logs to Syslog agent"", ""description"": ""1. Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure to send the logs to port 514 TCP on the machine's IP address.\n2. Forward Trend Micro Deep Security events to the Syslog agent.\n3. Define a new Syslog Configuration that uses the CEF format by referencing [this knowledge article](https://aka.ms/Sentinel-trendmicro-kblink) for additional information.\n4. 
Configure the Deep Security Manager to use this new configuration to forward events to the Syslog agent using [these instructions](https://aka.ms/Sentinel-trendMicro-connectorInstructions).\n5. Make sure to save the [TrendMicroDeepSecurity](https://aka.ms/TrendMicroDeepSecurityFunction) function so that it queries the Trend Micro Deep Security data properly.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Deep%20Security/Data%20Connectors/TrendMicroDeepSecurity.json","true" -"","Trend Micro TippingPoint","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20TippingPoint","trendmicro","trend_micro_tippingpoint_mss","2022-05-02","","","Trend Micro","Partner","https://success.trendmicro.com/dcx/s/contactus?language=en_US","","domains","","","","","","","false","","false" -"CommonSecurityLog","Trend Micro TippingPoint","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20TippingPoint","trendmicro","trend_micro_tippingpoint_mss","2022-05-02","","","Trend Micro","Partner","https://success.trendmicro.com/dcx/s/contactus?language=en_US","","domains","TrendMicroTippingPoint","Trend Micro","[Deprecated] Trend Micro TippingPoint via Legacy","The Trend Micro TippingPoint connector allows you to easily connect your TippingPoint SMS IPS events with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's networks/systems and improves your security operation capabilities.","[{""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias TrendMicroTippingPoint and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20TippingPoint/Parsers/TrendMicroTippingPoint).The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. 
Forward Trend Micro TippingPoint SMS logs to Syslog agent"", ""description"": ""Set your TippingPoint SMS to send Syslog messages in ArcSight CEF Format v4.2 format to the proxy machine. Make sure to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20TippingPoint/Data%20Connectors/TrendMicroTippingPoint.json","true" -"","Trend Micro Vision One","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Vision%20One","trendmicro","trend_micro_vision_one_xdr_mss","2022-05-11","2024-07-16","","Trend Micro","Partner","https://success.trendmicro.com/dcx/s/?language=en_US","","domains","","","","","","","false","","false" -"TrendMicro_XDR_OAT_CL","Trend Micro Vision One","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Vision%20One","trendmicro","trend_micro_vision_one_xdr_mss","2022-05-11","2024-07-16","","Trend Micro","Partner","https://success.trendmicro.com/dcx/s/?language=en_US","","domains","TrendMicroXDR","Trend Micro","Trend Vision One","The [Trend Vision One](https://www.trendmicro.com/en_us/business/products/detection-response/xdr.html) connector allows you to easily connect your Workbench alert data with Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities. This gives you more insight into your organization's networks/systems and improves your security operation capabilities.

The Trend Vision One connector is supported in Microsoft Sentinel in the following regions: Australia East, Australia Southeast, Brazil South, Canada Central, Canada East, Central India, Central US, East Asia, East US, East US 2, France Central, Japan East, Korea Central, North Central US, North Europe, Norway East, South Africa North, South Central US, Southeast Asia, Sweden Central, Switzerland North, UAE North, UK South, UK West, West Europe, West US, West US 2, West US 3.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Trend Vision One API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Trend Vision One API**\n\n [Follow these instructions](https://docs.trendmicro.com/documentation/article/trend-vision-one-api-keys-third-party-apps) to create an account and an API authentication token.""}, {""title"": """", ""description"": ""**STEP 2 - Use the below deployment option to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Trend Vision One connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Trend Vision One API Authorization Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template Deployment"", ""description"": ""This method provides an automated deployment of the Trend Vision One connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-trendmicroxdr-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter a unique **Function Name**, **Workspace ID**, **Workspace Key**, **API Token** and **Region Code**. \n - Note: Provide the appropriate region code based on where your Trend Vision One instance is deployed: us, eu, au, in, sg, jp \n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Trend Vision One API Token"", ""description"": ""A Trend Vision One API Token is required. 
See the documentation to learn more about the [Trend Vision One API](https://docs.trendmicro.com/documentation/article/trend-vision-one-api-keys-third-party-apps).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Vision%20One/Data%20Connectors/TrendMicroXDR.json","true" -"TrendMicro_XDR_RCA_Result_CL","Trend Micro Vision One","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Vision%20One","trendmicro","trend_micro_vision_one_xdr_mss","2022-05-11","2024-07-16","","Trend Micro","Partner","https://success.trendmicro.com/dcx/s/?language=en_US","","domains","TrendMicroXDR","Trend Micro","Trend Vision One","The [Trend Vision One](https://www.trendmicro.com/en_us/business/products/detection-response/xdr.html) connector allows you to easily connect your Workbench alert data with Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities. This gives you more insight into your organization's networks/systems and improves your security operation capabilities.

The Trend Vision One connector is supported in Microsoft Sentinel in the following regions: Australia East, Australia Southeast, Brazil South, Canada Central, Canada East, Central India, Central US, East Asia, East US, East US 2, France Central, Japan East, Korea Central, North Central US, North Europe, Norway East, South Africa North, South Central US, Southeast Asia, Sweden Central, Switzerland North, UAE North, UK South, UK West, West Europe, West US, West US 2, West US 3.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Trend Vision One API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Trend Vision One API**\n\n [Follow these instructions](https://docs.trendmicro.com/documentation/article/trend-vision-one-api-keys-third-party-apps) to create an account and an API authentication token.""}, {""title"": """", ""description"": ""**STEP 2 - Use the below deployment option to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Trend Vision One connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Trend Vision One API Authorization Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template Deployment"", ""description"": ""This method provides an automated deployment of the Trend Vision One connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-trendmicroxdr-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter a unique **Function Name**, **Workspace ID**, **Workspace Key**, **API Token** and **Region Code**. \n - Note: Provide the appropriate region code based on where your Trend Vision One instance is deployed: us, eu, au, in, sg, jp \n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Trend Vision One API Token"", ""description"": ""A Trend Vision One API Token is required. 
See the documentation to learn more about the [Trend Vision One API](https://docs.trendmicro.com/documentation/article/trend-vision-one-api-keys-third-party-apps).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Vision%20One/Data%20Connectors/TrendMicroXDR.json","true" -"TrendMicro_XDR_RCA_Task_CL","Trend Micro Vision One","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Vision%20One","trendmicro","trend_micro_vision_one_xdr_mss","2022-05-11","2024-07-16","","Trend Micro","Partner","https://success.trendmicro.com/dcx/s/?language=en_US","","domains","TrendMicroXDR","Trend Micro","Trend Vision One","The [Trend Vision One](https://www.trendmicro.com/en_us/business/products/detection-response/xdr.html) connector allows you to easily connect your Workbench alert data with Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities. This gives you more insight into your organization's networks/systems and improves your security operation capabilities.

The Trend Vision One connector is supported in Microsoft Sentinel in the following regions: Australia East, Australia Southeast, Brazil South, Canada Central, Canada East, Central India, Central US, East Asia, East US, East US 2, France Central, Japan East, Korea Central, North Central US, North Europe, Norway East, South Africa North, South Central US, Southeast Asia, Sweden Central, Switzerland North, UAE North, UK South, UK West, West Europe, West US, West US 2, West US 3.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Trend Vision One API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Trend Vision One API**\n\n [Follow these instructions](https://docs.trendmicro.com/documentation/article/trend-vision-one-api-keys-third-party-apps) to create an account and an API authentication token.""}, {""title"": """", ""description"": ""**STEP 2 - Use the below deployment option to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Trend Vision One connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Trend Vision One API Authorization Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template Deployment"", ""description"": ""This method provides an automated deployment of the Trend Vision One connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-trendmicroxdr-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter a unique **Function Name**, **Workspace ID**, **Workspace Key**, **API Token** and **Region Code**. \n - Note: Provide the appropriate region code based on where your Trend Vision One instance is deployed: us, eu, au, in, sg, jp \n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Trend Vision One API Token"", ""description"": ""A Trend Vision One API Token is required. 
See the documentation to learn more about the [Trend Vision One API](https://docs.trendmicro.com/documentation/article/trend-vision-one-api-keys-third-party-apps).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Vision%20One/Data%20Connectors/TrendMicroXDR.json","true" -"TrendMicro_XDR_WORKBENCH_CL","Trend Micro Vision One","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Vision%20One","trendmicro","trend_micro_vision_one_xdr_mss","2022-05-11","2024-07-16","","Trend Micro","Partner","https://success.trendmicro.com/dcx/s/?language=en_US","","domains","TrendMicroXDR","Trend Micro","Trend Vision One","The [Trend Vision One](https://www.trendmicro.com/en_us/business/products/detection-response/xdr.html) connector allows you to easily connect your Workbench alert data with Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities. This gives you more insight into your organization's networks/systems and improves your security operation capabilities.

The Trend Vision One connector is supported in Microsoft Sentinel in the following regions: Australia East, Australia Southeast, Brazil South, Canada Central, Canada East, Central India, Central US, East Asia, East US, East US 2, France Central, Japan East, Korea Central, North Central US, North Europe, Norway East, South Africa North, South Central US, Southeast Asia, Sweden Central, Switzerland North, UAE North, UK South, UK West, West Europe, West US, West US 2, West US 3.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Trend Vision One API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Trend Vision One API**\n\n [Follow these instructions](https://docs.trendmicro.com/documentation/article/trend-vision-one-api-keys-third-party-apps) to create an account and an API authentication token.""}, {""title"": """", ""description"": ""**STEP 2 - Use the below deployment option to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Trend Vision One connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Trend Vision One API Authorization Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template Deployment"", ""description"": ""This method provides an automated deployment of the Trend Vision One connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-trendmicroxdr-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter a unique **Function Name**, **Workspace ID**, **Workspace Key**, **API Token** and **Region Code**. \n - Note: Provide the appropriate region code based on where your Trend Vision One instance is deployed: us, eu, au, in, sg, jp \n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Trend Vision One API Token"", ""description"": ""A Trend Vision One API Token is required. 
See the documentation to learn more about the [Trend Vision One API](https://docs.trendmicro.com/documentation/article/trend-vision-one-api-keys-third-party-apps).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Vision%20One/Data%20Connectors/TrendMicroXDR.json","true" -"","Tropico","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tropico","tropico","azure-sentinel-solution-tropico","2025-12-02","","","TROPICO Security","Partner","https://tropicosecurity.com/","","domains","","","","","","","false","","false" -"","UEBA Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/UEBA%20Essentials","azuresentinel","azure-sentinel-solution-uebaessentials","2022-06-27","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"","URLhaus","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/URLhaus","azuresentinel","azure-sentinel-solution-urlhaus","2022-09-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","Ubiquiti UniFi","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Ubiquiti%20UniFi","azuresentinel","azure-sentinel-solution-ubiquitiunifi","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"Ubiquiti_CL","Ubiquiti UniFi","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Ubiquiti%20UniFi","azuresentinel","azure-sentinel-solution-ubiquitiunifi","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","UbiquitiUnifi","Ubiquiti","[Deprecated] Ubiquiti UniFi","The [Ubiquiti UniFi](https://www.ui.com/) data connector provides the capability to ingest [Ubiquiti UniFi firewall, dns, ssh, AP events](https://help.ui.com/hc/en-us/articles/204959834-UniFi-How-to-View-Log-Files) into Microsoft 
Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**UbiquitiAuditEvent**](https://aka.ms/sentinel-UbiquitiUnifi-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using Enterprise System Controller Release Version: 5.6.2 (Syslog)"", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server to which the Ubiquiti logs are forwarded from the Ubiquiti device (e.g. remote syslog server)\n\n> Logs from Ubiquiti Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": 
""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Follow the configuration steps below to get Ubiquiti logs into Microsoft Sentinel. Refer to the [Azure Monitor Documentation](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-json) for more details on these steps.\n1. Configure log forwarding on your Ubiquiti controller: \n\n\t i. Go to Settings > System Setting > Controller Configuration > Remote Logging and enable the Syslog and Debugging (optional) logs (Refer to [User Guide](https://dl.ui.com/guides/UniFi/UniFi_Controller_V5_UG.pdf) for detailed instructions).\n2. Download config file [Ubiquiti.conf](https://aka.ms/sentinel-UbiquitiUnifi-conf).\n3. Login to the server where you have installed Azure Log Analytics agent.\n4. Copy Ubiquiti.conf to the /etc/opt/microsoft/omsagent/**workspace_id**/conf/omsagent.d/ folder.\n5. Edit Ubiquiti.conf as follows:\n\n\t i. specify port which you have set your Ubiquiti device to forward logs to (line 4)\n\n\t ii. replace **workspace_id** with real value of your Workspace ID (lines 14,15,16,19)\n6. 
Save changes and restart the Azure Log Analytics agent for Linux service with the following command:\n\t\tsudo /opt/microsoft/omsagent/bin/service_control restart"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Ubiquiti%20UniFi/Data%20Connectors/Connector_Ubiquiti_agent.json","true" -"","VMRay","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMRay","vmraygmbh1623334327435","microsoft-sentinel-solution-vmray","2025-07-23","","","VMRay","Partner","https://www.vmray.com/contact/customer-support/","","domains","","","","","","","false","","false" -"ThreatIntelligenceIndicator","VMRay","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMRay","vmraygmbh1623334327435","microsoft-sentinel-solution-vmray","2025-07-23","","","VMRay","Partner","https://www.vmray.com/contact/customer-support/","","domains","VMRay","VMRay","VMRayThreatIntelligence","VMRayThreatIntelligence connector automatically generates and feeds threat intelligence for all submissions to VMRay, improving threat detection and incident response in Sentinel. 
This seamless integration empowers teams to proactively address emerging threats.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the VMRay API to pull VMRay Threat IOCs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Deploy VMRay Threat Intelligence Connector"", ""description"": ""1. Ensure you have all the required prerequisites: **Client ID**, **Tenant ID**, **Client Secret**, **VMRay API Key**, and **VMRay Base URL**.\n2. To obtain the Client ID, Client Secret, and Tenant ID, [follow these instructions](https://github.com/Azure/Azure-Sentinel/tree/master/Solutions/VMRay#vmray-configurations)\n3. For the **Flex Consumption Plan**, click the **Deploy to Azure** button below:\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-VMRay-azuredeployflex)\n\n4. 
For the **Premium Plan**, click the **Deploy to Azure** button below:\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-VMRay-azuredeploypremium).""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Azure Active Directory and assign the role of contributor to the app in the resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**VMRay API Key** is required.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMRay/Data%20Connectors/VMRayThreatIntelligence_FunctionApp.json","true" -"","VMWareESXi","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMWareESXi","azuresentinel","azure-sentinel-solution-vmwareesxi","2022-01-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"Syslog","VMWareESXi","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMWareESXi","azuresentinel","azure-sentinel-solution-vmwareesxi","2022-01-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","VMwareESXi","VMWare","[Deprecated] VMware ESXi","The [VMware ESXi](https://www.vmware.com/products/esxi-and-esx.html) connector allows you to easily connect your VMWare ESXi logs with Microsoft Sentinel This gives you more insight into your organization's ESXi servers and improves your security operation capabilities.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias VMwareESXi and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMWareESXi/Parsers/VMwareESXi.yaml), on the second line of the query, enter the hostname(s) of your VMwareESXi device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n 2. Select **Apply below configuration to my machines** and select the facilities and severities.\n 3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Configure and connect the VMware ESXi"", ""description"": ""1. Follow these instructions to configure the VMWare ESXi to forward syslog: \n - [VMware ESXi 3.5 and 4.x](https://kb.vmware.com/s/article/1016621) \n - [VMware ESXi 5.0+](https://docs.vmware.com/en/VMware-vSphere/5.5/com.vmware.vsphere.monitoring.doc/GUID-9F67DB52-F469-451F-B6C8-DAE8D95976E7.html)\n2. 
Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""VMwareESXi"", ""description"": ""must be configured to export logs via Syslog""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMWareESXi/Data%20Connectors/Connector_Syslog_VMwareESXi.json","true" -"","VMware Carbon Black Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud","azuresentinel","azure-sentinel-solution-vmwarecarbonblack","2022-06-01","","","Microsoft","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"CarbonBlackAuditLogs_CL","VMware Carbon Black Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud","azuresentinel","azure-sentinel-solution-vmwarecarbonblack","2022-06-01","","","Microsoft","Microsoft","https://support.microsoft.com/","","domains","VMwareCarbonBlack","VMware","VMware Carbon Black Cloud","The [VMware Carbon Black Cloud](https://www.vmware.com/products/carbon-black-cloud.html) connector provides the capability to ingest Carbon Black data into Microsoft Sentinel. The connector provides visibility into Audit, Notification and Event logs in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to VMware Carbon Black to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the VMware Carbon Black API**\n\n [Follow these instructions](https://developer.carbonblack.com/reference/carbon-black-cloud/authentication/#creating-an-api-key) to create an API Key.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the VMware Carbon Black connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the VMware Carbon Black API Authorization Key(s), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""This method provides an automated deployment of the VMware Carbon Black connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelcarbonblackazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelcarbonblackazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the **Workspace ID**, **Workspace Key**, **Log Types**, **API ID(s)**, **API Key(s)**, **Carbon Black Org Key**, **S3 Bucket Name**, **AWS Access Key Id**, **AWS Secret Access Key**, **EventPrefixFolderName**,**AlertPrefixFolderName**, and validate the **URI**.\n> - Enter the URI that corresponds to your region. The complete list of API URLs can be [found here](https://community.carbonblack.com/t5/Knowledge-Base/PSC-What-URLs-are-used-to-access-the-APIs/ta-p/67346)\n - The default **Time Interval** is set to pull the last five (5) minutes of data. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. \n - Carbon Black requires a seperate set of API ID/Keys to ingest Notification alerts. Enter the SIEM API ID/Key values or leave blank, if not required. \n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the VMware Carbon Black connector manually with Azure Functions.""}, {""title"": """", ""description"": ""**1. Create a Function App**\n\n1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp), and select **+ Add**.\n2. In the **Basics** tab, ensure Runtime stack is set to **Powershell Core**. \n3. In the **Hosting** tab, ensure the **Consumption (Serverless)** plan type is selected.\n4. 
Make other preferrable configuration changes, if needed, then click **Create**.""}, {""title"": """", ""description"": ""**2. Import Function App Code**\n\n1. In the newly created Function App, select **Functions** on the left pane and click **+ Add**.\n2. Select **Timer Trigger**.\n3. Enter a unique Function **Name** and modify the cron schedule, if needed. The default value is set to run the Function App every 5 minutes. (Note: the Timer trigger should match the `timeInterval` value below to prevent overlapping data), click **Create**.\n4. Click on **Code + Test** on the left pane. \n5. Copy the [Function App Code](https://aka.ms/sentinelcarbonblackazurefunctioncode) and paste into the Function App `run.ps1` editor.\n5. Click **Save**.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following thirteen to sixteen (13-16) application settings individually, with their respective string values (case-sensitive): \n\t\tapiId\n\t\tapiKey\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\ttimeInterval\n\t\tCarbonBlackOrgKey\n\t\tCarbonBlackLogTypes \n\t\ts3BucketName \n\t\tEventPrefixFolderName \n\t\tAlertPrefixFolderName \n\t\tAWSAccessKeyId \n\t\tAWSSecretAccessKey \n\t\tSIEMapiId (Optional)\n\t\tSIEMapiKey (Optional)\n\t\tlogAnalyticsUri (optional) \n> - Enter the URI that corresponds to your region. The complete list of API URLs can be [found here](https://community.carbonblack.com/t5/Knowledge-Base/PSC-What-URLs-are-used-to-access-the-APIs/ta-p/67346). 
The `uri` value must follow the following schema: `https://.conferdeploy.net` - There is no need to add a time suffix to the URI, the Function App will dynamically append the Time Value to the URI in the proper format.\n> - Set the `timeInterval` (in minutes) to the default value of `5` to correspond to the default Timer Trigger of every `5` minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly to prevent overlapping data ingestion.\n> - Carbon Black requires a seperate set of API ID/Keys to ingest Notification alerts. Enter the `SIEMapiId` and `SIEMapiKey` values, if needed, or omit, if not required. \n> - Note: If using Azure Key Vault, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""VMware Carbon Black API Key(s)"", ""description"": ""Carbon Black API and/or SIEM Level API Key(s) are required. See the documentation to learn more about the [Carbon Black API](https://developer.carbonblack.com/reference/carbon-black-cloud/cb-defense/latest/rest-api/).\n - A Carbon Black **API** access level API ID and Key is required for [Audit](https://developer.carbonblack.com/reference/carbon-black-cloud/cb-defense/latest/rest-api/#audit-log-events) and [Event](https://developer.carbonblack.com/reference/carbon-black-cloud/platform/latest/data-forwarder-config-api/) logs. 
\n - A Carbon Black **SIEM** access level API ID and Key is required for [Notification](https://developer.carbonblack.com/reference/carbon-black-cloud/cb-defense/latest/rest-api/#notifications) alerts.""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name**, **Folder Name in AWS S3 Bucket** are required for Amazon S3 REST API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/VMwareCarbonBlack_API_FunctionApp.json","true" -"CarbonBlackEvents_CL","VMware Carbon Black Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud","azuresentinel","azure-sentinel-solution-vmwarecarbonblack","2022-06-01","","","Microsoft","Microsoft","https://support.microsoft.com/","","domains","VMwareCarbonBlack","VMware","VMware Carbon Black Cloud","The [VMware Carbon Black Cloud](https://www.vmware.com/products/carbon-black-cloud.html) connector provides the capability to ingest Carbon Black data into Microsoft Sentinel. The connector provides visibility into Audit, Notification and Event logs in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to VMware Carbon Black to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the VMware Carbon Black API**\n\n [Follow these instructions](https://developer.carbonblack.com/reference/carbon-black-cloud/authentication/#creating-an-api-key) to create an API Key.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the VMware Carbon Black connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the VMware Carbon Black API Authorization Key(s), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""This method provides an automated deployment of the VMware Carbon Black connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelcarbonblackazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelcarbonblackazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **Log Types**, **API ID(s)**, **API Key(s)**, **Carbon Black Org Key**, **S3 Bucket Name**, **AWS Access Key Id**, **AWS Secret Access Key**, **EventPrefixFolderName**,**AlertPrefixFolderName**, and validate the **URI**.\n> - Enter the URI that corresponds to your region. 
The complete list of API URLs can be [found here](https://community.carbonblack.com/t5/Knowledge-Base/PSC-What-URLs-are-used-to-access-the-APIs/ta-p/67346)\n - The default **Time Interval** is set to pull the last five (5) minutes of data. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. \n - Carbon Black requires a seperate set of API ID/Keys to ingest Notification alerts. Enter the SIEM API ID/Key values or leave blank, if not required. \n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the VMware Carbon Black connector manually with Azure Functions.""}, {""title"": """", ""description"": ""**1. Create a Function App**\n\n1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp), and select **+ Add**.\n2. In the **Basics** tab, ensure Runtime stack is set to **Powershell Core**. \n3. In the **Hosting** tab, ensure the **Consumption (Serverless)** plan type is selected.\n4. Make other preferrable configuration changes, if needed, then click **Create**.""}, {""title"": """", ""description"": ""**2. Import Function App Code**\n\n1. In the newly created Function App, select **Functions** on the left pane and click **+ Add**.\n2. Select **Timer Trigger**.\n3. 
Enter a unique Function **Name** and modify the cron schedule, if needed. The default value is set to run the Function App every 5 minutes. (Note: the Timer trigger should match the `timeInterval` value below to prevent overlapping data), click **Create**.\n4. Click on **Code + Test** on the left pane. \n5. Copy the [Function App Code](https://aka.ms/sentinelcarbonblackazurefunctioncode) and paste into the Function App `run.ps1` editor.\n5. Click **Save**.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following thirteen to sixteen (13-16) application settings individually, with their respective string values (case-sensitive): \n\t\tapiId\n\t\tapiKey\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\ttimeInterval\n\t\tCarbonBlackOrgKey\n\t\tCarbonBlackLogTypes \n\t\ts3BucketName \n\t\tEventPrefixFolderName \n\t\tAlertPrefixFolderName \n\t\tAWSAccessKeyId \n\t\tAWSSecretAccessKey \n\t\tSIEMapiId (Optional)\n\t\tSIEMapiKey (Optional)\n\t\tlogAnalyticsUri (optional) \n> - Enter the URI that corresponds to your region. The complete list of API URLs can be [found here](https://community.carbonblack.com/t5/Knowledge-Base/PSC-What-URLs-are-used-to-access-the-APIs/ta-p/67346). The `uri` value must follow the following schema: `https://.conferdeploy.net` - There is no need to add a time suffix to the URI, the Function App will dynamically append the Time Value to the URI in the proper format.\n> - Set the `timeInterval` (in minutes) to the default value of `5` to correspond to the default Timer Trigger of every `5` minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly to prevent overlapping data ingestion.\n> - Carbon Black requires a seperate set of API ID/Keys to ingest Notification alerts. 
Enter the `SIEMapiId` and `SIEMapiKey` values, if needed, or omit, if not required. \n> - Note: If using Azure Key Vault, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""VMware Carbon Black API Key(s)"", ""description"": ""Carbon Black API and/or SIEM Level API Key(s) are required. 
See the documentation to learn more about the [Carbon Black API](https://developer.carbonblack.com/reference/carbon-black-cloud/cb-defense/latest/rest-api/).\n - A Carbon Black **API** access level API ID and Key is required for [Audit](https://developer.carbonblack.com/reference/carbon-black-cloud/cb-defense/latest/rest-api/#audit-log-events) and [Event](https://developer.carbonblack.com/reference/carbon-black-cloud/platform/latest/data-forwarder-config-api/) logs. \n - A Carbon Black **SIEM** access level API ID and Key is required for [Notification](https://developer.carbonblack.com/reference/carbon-black-cloud/cb-defense/latest/rest-api/#notifications) alerts.""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name**, **Folder Name in AWS S3 Bucket** are required for Amazon S3 REST API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/VMwareCarbonBlack_API_FunctionApp.json","true" -"CarbonBlackNotifications_CL","VMware Carbon Black Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud","azuresentinel","azure-sentinel-solution-vmwarecarbonblack","2022-06-01","","","Microsoft","Microsoft","https://support.microsoft.com/","","domains","VMwareCarbonBlack","VMware","VMware Carbon Black Cloud","The [VMware Carbon Black Cloud](https://www.vmware.com/products/carbon-black-cloud.html) connector provides the capability to ingest Carbon Black data into Microsoft Sentinel. The connector provides visibility into Audit, Notification and Event logs in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to VMware Carbon Black to pull its logs into Microsoft Sentinel. 
This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the VMware Carbon Black API**\n\n [Follow these instructions](https://developer.carbonblack.com/reference/carbon-black-cloud/authentication/#creating-an-api-key) to create an API Key.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the VMware Carbon Black connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the VMware Carbon Black API Authorization Key(s), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""This method provides an automated deployment of the VMware Carbon Black connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelcarbonblackazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelcarbonblackazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. 
\n3. Enter the **Workspace ID**, **Workspace Key**, **Log Types**, **API ID(s)**, **API Key(s)**, **Carbon Black Org Key**, **S3 Bucket Name**, **AWS Access Key Id**, **AWS Secret Access Key**, **EventPrefixFolderName**,**AlertPrefixFolderName**, and validate the **URI**.\n> - Enter the URI that corresponds to your region. The complete list of API URLs can be [found here](https://community.carbonblack.com/t5/Knowledge-Base/PSC-What-URLs-are-used-to-access-the-APIs/ta-p/67346)\n - The default **Time Interval** is set to pull the last five (5) minutes of data. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. \n - Carbon Black requires a seperate set of API ID/Keys to ingest Notification alerts. Enter the SIEM API ID/Key values or leave blank, if not required. \n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the VMware Carbon Black connector manually with Azure Functions.""}, {""title"": """", ""description"": ""**1. Create a Function App**\n\n1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp), and select **+ Add**.\n2. In the **Basics** tab, ensure Runtime stack is set to **Powershell Core**. \n3. 
In the **Hosting** tab, ensure the **Consumption (Serverless)** plan type is selected.\n4. Make other preferrable configuration changes, if needed, then click **Create**.""}, {""title"": """", ""description"": ""**2. Import Function App Code**\n\n1. In the newly created Function App, select **Functions** on the left pane and click **+ Add**.\n2. Select **Timer Trigger**.\n3. Enter a unique Function **Name** and modify the cron schedule, if needed. The default value is set to run the Function App every 5 minutes. (Note: the Timer trigger should match the `timeInterval` value below to prevent overlapping data), click **Create**.\n4. Click on **Code + Test** on the left pane. \n5. Copy the [Function App Code](https://aka.ms/sentinelcarbonblackazurefunctioncode) and paste into the Function App `run.ps1` editor.\n5. Click **Save**.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following thirteen to sixteen (13-16) application settings individually, with their respective string values (case-sensitive): \n\t\tapiId\n\t\tapiKey\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\ttimeInterval\n\t\tCarbonBlackOrgKey\n\t\tCarbonBlackLogTypes \n\t\ts3BucketName \n\t\tEventPrefixFolderName \n\t\tAlertPrefixFolderName \n\t\tAWSAccessKeyId \n\t\tAWSSecretAccessKey \n\t\tSIEMapiId (Optional)\n\t\tSIEMapiKey (Optional)\n\t\tlogAnalyticsUri (optional) \n> - Enter the URI that corresponds to your region. The complete list of API URLs can be [found here](https://community.carbonblack.com/t5/Knowledge-Base/PSC-What-URLs-are-used-to-access-the-APIs/ta-p/67346). 
The `uri` value must follow the following schema: `https://.conferdeploy.net` - There is no need to add a time suffix to the URI, the Function App will dynamically append the Time Value to the URI in the proper format.\n> - Set the `timeInterval` (in minutes) to the default value of `5` to correspond to the default Timer Trigger of every `5` minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly to prevent overlapping data ingestion.\n> - Carbon Black requires a seperate set of API ID/Keys to ingest Notification alerts. Enter the `SIEMapiId` and `SIEMapiKey` values, if needed, or omit, if not required. \n> - Note: If using Azure Key Vault, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""VMware Carbon Black API Key(s)"", ""description"": ""Carbon Black API and/or SIEM Level API Key(s) are required. See the documentation to learn more about the [Carbon Black API](https://developer.carbonblack.com/reference/carbon-black-cloud/cb-defense/latest/rest-api/).\n - A Carbon Black **API** access level API ID and Key is required for [Audit](https://developer.carbonblack.com/reference/carbon-black-cloud/cb-defense/latest/rest-api/#audit-log-events) and [Event](https://developer.carbonblack.com/reference/carbon-black-cloud/platform/latest/data-forwarder-config-api/) logs. 
\n - A Carbon Black **SIEM** access level API ID and Key is required for [Notification](https://developer.carbonblack.com/reference/carbon-black-cloud/cb-defense/latest/rest-api/#notifications) alerts.""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name**, **Folder Name in AWS S3 Bucket** are required for Amazon S3 REST API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/VMwareCarbonBlack_API_FunctionApp.json","true" -"CarbonBlack_Alerts_CL","VMware Carbon Black Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud","azuresentinel","azure-sentinel-solution-vmwarecarbonblack","2022-06-01","","","Microsoft","Microsoft","https://support.microsoft.com/","","domains","carbonBlackAWSS3","Microsoft","VMware Carbon Black Cloud via AWS S3","The [VMware Carbon Black Cloud](https://www.vmware.com/products/carbon-black-cloud.html) via AWS S3 data connector provides the capability to ingest watchlist, alerts, auth and endpoints events via AWS S3 and stream them to ASIM normalized tables. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. AWS CloudFormation Deployment \n To configure access on AWS, two templates has been generated to set up the AWS environment to send logs from S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create) \n 2. In AWS, choose the 'Upload a template file' option and click on 'Choose file'. Select the downloaded template \n 3. 
Click 'Next' and 'Create stack'""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS Carbon Black resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CarbonBlack""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""When deploying 'Template 2: AWS Carbon Black resources deployment' template you'll need supply a few parameters \n * **Stack Name**: A stack name of your choosing (will appear in the list of stacks in AWS)\n * **Role Name**: Must begin with 'OIDC_' prefix, has a default value. \n * **Bucket Name**: Bucket name of your choosing, if you already have an existing bucket paste the name here \n * **CreateNewBucket**: If you already have an existing bucket that you would like to use for this connector select 'false' for this option, otherwise a bucket with the name you entered in 'Bucket Name' will be created from this stack. \n * **Region**: This is the region of the AWS resources based on Carbon Black's mapping - for more information please see [Carbon Black documentation](https://developer.carbonblack.com/reference/carbon-black-cloud/integrations/data-forwarder/quick-setup/#create-a-bucket).\n * **SQSQueuePrefix**: The stack create multiple queues, this prefix will be added to each one of them. \n * **WorkspaceID**: Use the Workspace ID provided below.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Once the deployment is complete - head to the 'Outputs' tab, you will see: Role ARN, S3 bucket and 4 SQS resources created. You will need those resources in the next step when configuring Carbon Black's data forwarders and the data connector.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Carbon Black data forwarder configuration \n After all AWS resources has been created you'll need to configure Carbon Black to forward the events to the AWS buckets for Microsoft Sentinel to ingest them. Follow [Carbon Black's documentation on how to create a 'Data Forwarders'](https://developer.carbonblack.com/reference/carbon-black-cloud/integrations/data-forwarder/quick-setup/#2-create-a-forwarder) Use the first recommended option. When asked to input a bucket name use the bucket created in the previous step. \n You will be required to add 'S3 prefix' for each forwarder, please use this mapping:""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Alert | carbon-black-cloud-forwarder/Alerts |\n | Auth Events | carbon-black-cloud-forwarder/Auth |\n | Endpoint Events | carbon-black-cloud-forwarder/Endpoint |\n | Watchlist Hit | carbon-black-cloud-forwarder/Watchlist |""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2.1. Test your data forwarder (Optional) \n To validate the data forwarder is configured as expected, in Carbon Black's portal search for the data forwarder that you just created and click on 'Test Forwarder' button under the 'Actions' column, this will generate a 'HealthCheck' file in the S3 Bucket, you should see it appear immediately.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the 'Add new collector' button, fill the required information, the ARN role and the SQS URL are created in step 1, note that you will need to enter the correct SQS URL and select the appropriate event type from the dropdown, for example if you want to ingest Alert events you will need to copy the Alerts SQS URL and select the 'Alerts' event type in the dropdown""}}]}, {""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CarbonBlackAlertsStream"", ""text"": ""Alerts""}, {""key"": ""Custom-CarbonBlackAuthStream"", ""text"": ""Auth Events""}, {""key"": ""Custom-CarbonBlackEndpointStream"", ""text"": ""Endpoint Events""}, {""key"": ""Custom-CarbonBlackWatchlistStream"", ""text"": ""Watchlist""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": 
""write permission."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Environment"", ""description"": ""You must have the following AWS resources defined and configured: S3, Simple Queue Service (SQS), IAM roles and permissions policies""}, {""name"": ""Environment"", ""description"": ""You must have a Carbon Black account and required permissions to create a Data Forwarder to AWS S3 buckets. \nFor more details visit [Carbon Black Data Forwarder Docs](https://docs.vmware.com/en/VMware-Carbon-Black-Cloud/services/carbon-black-cloud-user-guide/GUID-E8D33F72-BABB-4157-A908-D8BBDB5AF349.html)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/CarbonBlackViaAWSS3_ConnectorDefinition.json;https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/VMwareCarbonBlackCloud_ccp/CarbonBlack_DataConnectorDefination.json","false" -"CarbonBlack_Watchlist_CL","VMware Carbon Black Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud","azuresentinel","azure-sentinel-solution-vmwarecarbonblack","2022-06-01","","","Microsoft","Microsoft","https://support.microsoft.com/","","domains","carbonBlackAWSS3","Microsoft","VMware Carbon Black Cloud via AWS S3","The [VMware Carbon Black Cloud](https://www.vmware.com/products/carbon-black-cloud.html) via AWS S3 data connector provides the capability to ingest watchlist, alerts, auth and endpoints events via AWS S3 and stream them to ASIM normalized tables. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. 
AWS CloudFormation Deployment \n To configure access on AWS, two templates has been generated to set up the AWS environment to send logs from S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create) \n 2. In AWS, choose the 'Upload a template file' option and click on 'Choose file'. Select the downloaded template \n 3. Click 'Next' and 'Create stack'""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS Carbon Black resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CarbonBlack""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""When deploying 'Template 2: AWS Carbon Black resources deployment' template you'll need supply a few parameters \n * **Stack Name**: A stack name of your choosing (will appear in the list of stacks in AWS)\n * **Role Name**: Must begin with 'OIDC_' prefix, has a default value. \n * **Bucket Name**: Bucket name of your choosing, if you already have an existing bucket paste the name here \n * **CreateNewBucket**: If you already have an existing bucket that you would like to use for this connector select 'false' for this option, otherwise a bucket with the name you entered in 'Bucket Name' will be created from this stack. \n * **Region**: This is the region of the AWS resources based on Carbon Black's mapping - for more information please see [Carbon Black documentation](https://developer.carbonblack.com/reference/carbon-black-cloud/integrations/data-forwarder/quick-setup/#create-a-bucket).\n * **SQSQueuePrefix**: The stack create multiple queues, this prefix will be added to each one of them. 
\n * **WorkspaceID**: Use the Workspace ID provided below.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Once the deployment is complete - head to the 'Outputs' tab, you will see: Role ARN, S3 bucket and 4 SQS resources created. You will need those resources in the next step when configuring Carbon Black's data forwarders and the data connector.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Carbon Black data forwarder configuration \n After all AWS resources has been created you'll need to configure Carbon Black to forward the events to the AWS buckets for Microsoft Sentinel to ingest them. Follow [Carbon Black's documentation on how to create a 'Data Forwarders'](https://developer.carbonblack.com/reference/carbon-black-cloud/integrations/data-forwarder/quick-setup/#2-create-a-forwarder) Use the first recommended option. When asked to input a bucket name use the bucket created in the previous step. \n You will be required to add 'S3 prefix' for each forwarder, please use this mapping:""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Alert | carbon-black-cloud-forwarder/Alerts |\n | Auth Events | carbon-black-cloud-forwarder/Auth |\n | Endpoint Events | carbon-black-cloud-forwarder/Endpoint |\n | Watchlist Hit | carbon-black-cloud-forwarder/Watchlist |""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2.1. Test your data forwarder (Optional) \n To validate the data forwarder is configured as expected, in Carbon Black's portal search for the data forwarder that you just created and click on 'Test Forwarder' button under the 'Actions' column, this will generate a 'HealthCheck' file in the S3 Bucket, you should see it appear immediately.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the 'Add new collector' button, fill the required information, the ARN role and the SQS URL are created in step 1, note that you will need to enter the correct SQS URL and select the appropriate event type from the dropdown, for example if you want to ingest Alert events you will need to copy the Alerts SQS URL and select the 'Alerts' event type in the dropdown""}}]}, {""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CarbonBlackAlertsStream"", ""text"": ""Alerts""}, {""key"": ""Custom-CarbonBlackAuthStream"", ""text"": ""Auth Events""}, {""key"": ""Custom-CarbonBlackEndpointStream"", ""text"": ""Endpoint Events""}, {""key"": ""Custom-CarbonBlackWatchlistStream"", ""text"": ""Watchlist""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": 
""write permission."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Environment"", ""description"": ""You must have the following AWS resources defined and configured: S3, Simple Queue Service (SQS), IAM roles and permissions policies""}, {""name"": ""Environment"", ""description"": ""You must have a Carbon Black account and required permissions to create a Data Forwarder to AWS S3 buckets. \nFor more details visit [Carbon Black Data Forwarder Docs](https://docs.vmware.com/en/VMware-Carbon-Black-Cloud/services/carbon-black-cloud-user-guide/GUID-E8D33F72-BABB-4157-A908-D8BBDB5AF349.html)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/CarbonBlackViaAWSS3_ConnectorDefinition.json;https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/VMwareCarbonBlackCloud_ccp/CarbonBlack_DataConnectorDefination.json","false" -"","VMware SD-WAN and SASE","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20SD-WAN%20and%20SASE","velocloud","azure-sentinel-solution-vmwaresase","2023-12-31","","","VMware by Broadcom","Partner","https://developer.vmware.com/","","domains","","","","","","","false","","false" -"VMware_CWS_DLPLogs_CL","VMware SD-WAN and SASE","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20SD-WAN%20and%20SASE","velocloud","azure-sentinel-solution-vmwaresase","2023-12-31","","","VMware by Broadcom","Partner","https://developer.vmware.com/","","domains","VMwareSDWAN","VMware by Broadcom","VMware SD-WAN and SASE Connector","The [VMware SD-WAN & SASE](https://sase.vmware.com) data connector offers the capability to ingest VMware SD-WAN and CWS events into Microsoft Sentinel through the REST API. 
Refer to [API documentation](https://developer.vmware.com/apis/vmware-sase-platform/) for more information. The connector provides ability to get events which helps to examine potential network security issues, identify misconfigured network devices and monitor SD-WAN and SASE usage. If you have your own custom connector, make sure that the connector is deployed under an isolated Log Analytics Workspace first. In case of issues, questions or feature requests, please contact us via email on sase-siem-integration@vmware.com.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the VMware Edge Cloud Orchestrator REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the VECO API**\n\n [Follow the instructions](https://docs.vmware.com/en/VMware-SD-WAN/5.3/VMware-SD-WAN-Administration-Guide/GUID-2FA3763F-835C-4D10-A32B-450FEB5397D8.html) to create and obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function.**""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the VMware SD-WAN and SASE Connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelvmwaresdwan)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**.\n3. 
Enter or modify the Function App, Log Analytics and Azure Monitor settings, enter the VECO FQDN (without https://, for example vco123-usvi1.velocloud.net), enter the API token created (including \""Token \"" at the beginning of the string), and adjust your desired Function App freaquency, then deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the VMware SD-WAN and SASE Connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-vmwaresdwan-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. 
The name you type is validated to make sure that it's unique in Azure Functions. (e.g. vmwsase-siemXXXXXXXXXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.10.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab .\n3. Check if the application has these settings defined correctly and adjust if needed: \n\t\tapi_veco_authorization\n\t\tapi_veco_fqdn\n\t\tapp_frequency_mins\n\t\tazsa_share_connectionstring\n\t\tazsa_share_name dce_endpoint\n\t\tdcr_cwsdlplog_immutableid\n\t\tdcr_cwshealth_immutableid\n\t\tdcr_cwsweblog_immutableid\n\t\tdcr_efsfwlog_immutableid\n\t\tdcr_efshealth_immutableid\n\t\tdcr_saseaudit_immutableid\n\t\tstream_cwsdlplog\n\t\tstream_cwshealth\n\t\tstream_cwsweblog\n\t\tstream_efsfwlog\n\t\tstream_efshealth\n\t\tstream_saseaudit\n3. In case you made changes to application settings have been entered, make sure that you click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**api_veco_authorization**, **api_veco_fqdn** is required for REST API. [See the documentation to learn more about VMware SASE APIs](https://developer.vmware.com/apis/vmware-sase-platform/). Check all [requirements and follow the instructions](https://docs.vmware.com/en/VMware-SD-WAN/5.3/VMware-SD-WAN-Administration-Guide/GUID-2FA3763F-835C-4D10-A32B-450FEB5397D8.html) for obtaining credentials. The Function App only supports token-based API authentication. Be advised that the API Token generated will inherit the access rights of the user account under which it was generated.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20SD-WAN%20and%20SASE/Data%20Connectors/Function%20App%20Connector/VMwareSASE_API_FunctionApp.json","true" -"VMware_CWS_Health_CL","VMware SD-WAN and SASE","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20SD-WAN%20and%20SASE","velocloud","azure-sentinel-solution-vmwaresase","2023-12-31","","","VMware by Broadcom","Partner","https://developer.vmware.com/","","domains","VMwareSDWAN","VMware by Broadcom","VMware SD-WAN and SASE Connector","The [VMware SD-WAN & SASE](https://sase.vmware.com) data connector offers the capability to ingest VMware SD-WAN and CWS events into Microsoft Sentinel through the REST API. 
Refer to [API documentation](https://developer.vmware.com/apis/vmware-sase-platform/) for more information. The connector provides ability to get events which helps to examine potential network security issues, identify misconfigured network devices and monitor SD-WAN and SASE usage. If you have your own custom connector, make sure that the connector is deployed under an isolated Log Analytics Workspace first. In case of issues, questions or feature requests, please contact us via email on sase-siem-integration@vmware.com.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the VMware Edge Cloud Orchestrator REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the VECO API**\n\n [Follow the instructions](https://docs.vmware.com/en/VMware-SD-WAN/5.3/VMware-SD-WAN-Administration-Guide/GUID-2FA3763F-835C-4D10-A32B-450FEB5397D8.html) to create and obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function.**""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the VMware SD-WAN and SASE Connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelvmwaresdwan)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**.\n3. 
Enter or modify the Function App, Log Analytics and Azure Monitor settings, enter the VECO FQDN (without https://, for example vco123-usvi1.velocloud.net), enter the API token created (including \""Token \"" at the beginning of the string), and adjust your desired Function App freaquency, then deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the VMware SD-WAN and SASE Connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-vmwaresdwan-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. 
The name you type is validated to make sure that it's unique in Azure Functions. (e.g. vmwsase-siemXXXXXXXXXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.10.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab .\n3. Check if the application has these settings defined correctly and adjust if needed: \n\t\tapi_veco_authorization\n\t\tapi_veco_fqdn\n\t\tapp_frequency_mins\n\t\tazsa_share_connectionstring\n\t\tazsa_share_name dce_endpoint\n\t\tdcr_cwsdlplog_immutableid\n\t\tdcr_cwshealth_immutableid\n\t\tdcr_cwsweblog_immutableid\n\t\tdcr_efsfwlog_immutableid\n\t\tdcr_efshealth_immutableid\n\t\tdcr_saseaudit_immutableid\n\t\tstream_cwsdlplog\n\t\tstream_cwshealth\n\t\tstream_cwsweblog\n\t\tstream_efsfwlog\n\t\tstream_efshealth\n\t\tstream_saseaudit\n3. In case you made changes to application settings have been entered, make sure that you click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**api_veco_authorization**, **api_veco_fqdn** is required for REST API. [See the documentation to learn more about VMware SASE APIs](https://developer.vmware.com/apis/vmware-sase-platform/). Check all [requirements and follow the instructions](https://docs.vmware.com/en/VMware-SD-WAN/5.3/VMware-SD-WAN-Administration-Guide/GUID-2FA3763F-835C-4D10-A32B-450FEB5397D8.html) for obtaining credentials. The Function App only supports token-based API authentication. Be advised that the API Token generated will inherit the access rights of the user account under which it was generated.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20SD-WAN%20and%20SASE/Data%20Connectors/Function%20App%20Connector/VMwareSASE_API_FunctionApp.json","true" -"VMware_CWS_Weblogs_CL","VMware SD-WAN and SASE","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20SD-WAN%20and%20SASE","velocloud","azure-sentinel-solution-vmwaresase","2023-12-31","","","VMware by Broadcom","Partner","https://developer.vmware.com/","","domains","VMwareSDWAN","VMware by Broadcom","VMware SD-WAN and SASE Connector","The [VMware SD-WAN & SASE](https://sase.vmware.com) data connector offers the capability to ingest VMware SD-WAN and CWS events into Microsoft Sentinel through the REST API. 
Refer to [API documentation](https://developer.vmware.com/apis/vmware-sase-platform/) for more information. The connector provides ability to get events which helps to examine potential network security issues, identify misconfigured network devices and monitor SD-WAN and SASE usage. If you have your own custom connector, make sure that the connector is deployed under an isolated Log Analytics Workspace first. In case of issues, questions or feature requests, please contact us via email on sase-siem-integration@vmware.com.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the VMware Edge Cloud Orchestrator REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the VECO API**\n\n [Follow the instructions](https://docs.vmware.com/en/VMware-SD-WAN/5.3/VMware-SD-WAN-Administration-Guide/GUID-2FA3763F-835C-4D10-A32B-450FEB5397D8.html) to create and obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function.**""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the VMware SD-WAN and SASE Connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelvmwaresdwan)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**.\n3. 
Enter or modify the Function App, Log Analytics and Azure Monitor settings, enter the VECO FQDN (without https://, for example vco123-usvi1.velocloud.net), enter the API token created (including \""Token \"" at the beginning of the string), and adjust your desired Function App freaquency, then deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the VMware SD-WAN and SASE Connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-vmwaresdwan-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. 
The name you type is validated to make sure that it's unique in Azure Functions. (e.g. vmwsase-siemXXXXXXXXXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.10.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab .\n3. Check if the application has these settings defined correctly and adjust if needed: \n\t\tapi_veco_authorization\n\t\tapi_veco_fqdn\n\t\tapp_frequency_mins\n\t\tazsa_share_connectionstring\n\t\tazsa_share_name dce_endpoint\n\t\tdcr_cwsdlplog_immutableid\n\t\tdcr_cwshealth_immutableid\n\t\tdcr_cwsweblog_immutableid\n\t\tdcr_efsfwlog_immutableid\n\t\tdcr_efshealth_immutableid\n\t\tdcr_saseaudit_immutableid\n\t\tstream_cwsdlplog\n\t\tstream_cwshealth\n\t\tstream_cwsweblog\n\t\tstream_efsfwlog\n\t\tstream_efshealth\n\t\tstream_saseaudit\n3. In case you made changes to application settings have been entered, make sure that you click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**api_veco_authorization**, **api_veco_fqdn** is required for REST API. [See the documentation to learn more about VMware SASE APIs](https://developer.vmware.com/apis/vmware-sase-platform/). Check all [requirements and follow the instructions](https://docs.vmware.com/en/VMware-SD-WAN/5.3/VMware-SD-WAN-Administration-Guide/GUID-2FA3763F-835C-4D10-A32B-450FEB5397D8.html) for obtaining credentials. The Function App only supports token-based API authentication. Be advised that the API Token generated will inherit the access rights of the user account under which it was generated.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20SD-WAN%20and%20SASE/Data%20Connectors/Function%20App%20Connector/VMwareSASE_API_FunctionApp.json","true" -"VMware_VECO_EventLogs_CL","VMware SD-WAN and SASE","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20SD-WAN%20and%20SASE","velocloud","azure-sentinel-solution-vmwaresase","2023-12-31","","","VMware by Broadcom","Partner","https://developer.vmware.com/","","domains","VMwareSDWAN","VMware by Broadcom","VMware SD-WAN and SASE Connector","The [VMware SD-WAN & SASE](https://sase.vmware.com) data connector offers the capability to ingest VMware SD-WAN and CWS events into Microsoft Sentinel through the REST API. 
Refer to [API documentation](https://developer.vmware.com/apis/vmware-sase-platform/) for more information. The connector provides ability to get events which helps to examine potential network security issues, identify misconfigured network devices and monitor SD-WAN and SASE usage. If you have your own custom connector, make sure that the connector is deployed under an isolated Log Analytics Workspace first. In case of issues, questions or feature requests, please contact us via email on sase-siem-integration@vmware.com.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the VMware Edge Cloud Orchestrator REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the VECO API**\n\n [Follow the instructions](https://docs.vmware.com/en/VMware-SD-WAN/5.3/VMware-SD-WAN-Administration-Guide/GUID-2FA3763F-835C-4D10-A32B-450FEB5397D8.html) to create and obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function.**""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the VMware SD-WAN and SASE Connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelvmwaresdwan)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**.\n3. 
Enter or modify the Function App, Log Analytics and Azure Monitor settings, enter the VECO FQDN (without https://, for example vco123-usvi1.velocloud.net), enter the API token created (including \""Token \"" at the beginning of the string), and adjust your desired Function App frequency, then deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the VMware SD-WAN and SASE Connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-vmwaresdwan-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. 
The name you type is validated to make sure that it's unique in Azure Functions. (e.g. vmwsase-siemXXXXXXXXXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.10.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab .\n3. Check if the application has these settings defined correctly and adjust if needed: \n\t\tapi_veco_authorization\n\t\tapi_veco_fqdn\n\t\tapp_frequency_mins\n\t\tazsa_share_connectionstring\n\t\tazsa_share_name dce_endpoint\n\t\tdcr_cwsdlplog_immutableid\n\t\tdcr_cwshealth_immutableid\n\t\tdcr_cwsweblog_immutableid\n\t\tdcr_efsfwlog_immutableid\n\t\tdcr_efshealth_immutableid\n\t\tdcr_saseaudit_immutableid\n\t\tstream_cwsdlplog\n\t\tstream_cwshealth\n\t\tstream_cwsweblog\n\t\tstream_efsfwlog\n\t\tstream_efshealth\n\t\tstream_saseaudit\n3. In case you made changes to application settings have been entered, make sure that you click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**api_veco_authorization**, **api_veco_fqdn** is required for REST API. [See the documentation to learn more about VMware SASE APIs](https://developer.vmware.com/apis/vmware-sase-platform/). Check all [requirements and follow the instructions](https://docs.vmware.com/en/VMware-SD-WAN/5.3/VMware-SD-WAN-Administration-Guide/GUID-2FA3763F-835C-4D10-A32B-450FEB5397D8.html) for obtaining credentials. The Function App only supports token-based API authentication. 
Be advised that the API Token generated will inherit the access rights of the user account under which it was generated.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20SD-WAN%20and%20SASE/Data%20Connectors/Function%20App%20Connector/VMwareSASE_API_FunctionApp.json","true" -"","VMware vCenter","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20vCenter","azuresentinel","azure-sentinel-solution-vcenter","2022-06-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"vcenter_CL","VMware vCenter","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20vCenter","azuresentinel","azure-sentinel-solution-vcenter","2022-06-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","VMwarevCenter","VMware","[Deprecated] VMware vCenter","The [vCenter](https://www.vmware.com/in/products/vcenter-server.html) connector allows you to easily connect your vCenter server logs with Microsoft Sentinel. This gives you more insight into your organization's data centers and improves your security operation capabilities.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias VMware vCenter and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20vCenter/Parsers/vCenter.txt), on the second line of the query, enter the hostname(s) of your VMware vCenter device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. \n> 1. 
If you have not installed the vCenter solution from ContentHub then [Follow the steps](https://aka.ms/sentinel-vCenter-parser) to use the Kusto function alias, **vCenter**"", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Follow the configuration steps below to get vCenter server logs into Microsoft Sentinel. Refer to the [Azure Monitor Documentation](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-json) for more details on these steps.\n For vCenter Server logs, we have issues while parsing the data by OMS agent data using default settings. \nSo we advice to capture the logs into custom table **vcenter_CL** using below instructions. \n1. Login to the server where you have installed OMS agent.\n2. Download config file vCenter.conf \n\t\twget -v https://aka.ms/sentinel-vcenteroms-conf -O vcenter.conf \n3. Copy vcenter.conf to the /etc/opt/microsoft/omsagent/**workspace_id**/conf/omsagent.d/ folder. 
\n\t\tcp vcenter.conf /etc/opt/microsoft/omsagent/<>/conf/omsagent.d/\n4. Edit vcenter.conf as follows:\n\n\t a. vcenter.conf uses the port **22033** by default. Ensure this port is not being used by any other source on your server\n\n\t b. If you would like to change the default port for **vcenter.conf** make sure that you dont use default Azure monotoring /log analytic agent ports I.e.(For example CEF uses TCP port **25226** or **25224**) \n\n\t c. replace **workspace_id** with real value of your Workspace ID (lines 13,14,15,18)\n5. Save changes and restart the Azure Log Analytics agent for Linux service with the following command:\n\t\tsudo /opt/microsoft/omsagent/bin/service_control restart\n6. Modify /etc/rsyslog.conf file - add below template preferably at the beginning / before directives section \n\n\t\t$template vcenter,\""%timestamp% %hostname% %msg%\\ n\"" \n\n **Note - There is no space between slash(\\\\) and character 'n' in above command.**\n\n 7. Create a custom conf file in /etc/rsyslog.d/ for example 10-vcenter.conf and add following filter conditions.\n\nDownload config file [10-vCenter.conf](https://aka.ms/sentinel-vcenter-conf)\n\n\t With an added statement you will need to create a filter which will specify the logs coming from the vcenter server to be forwarded to the custom table.\n\n\t reference: [Filter Conditions \u2014 rsyslog 8.18.0.master documentation](https://rsyslog.readthedocs.io/en/latest/configuration/filters.html)\n\n\t Here is an example of filtering that can be defined, this is not complete and will require additional testing for each installation.\n\t\t if $rawmsg contains \""vcenter-server\"" then @@127.0.0.1:22033;vcenter\n\t\t & stop \n\t\t if $rawmsg contains \""vpxd\"" then @@127.0.0.1:22033;vcenter\n\t\t & stop\n\t\t \n8. Restart rsyslog\n\t\t systemctl restart rsyslog"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Configure and connect the vCenter device(s)"", ""description"": ""[Follow these instructions](https://docs.vmware.com/en/VMware-vSphere/7.0/com.vmware.vsphere.monitoring.doc/GUID-9633A961-A5C3-4658-B099-B81E0512DC21.html) to configure the vCenter to forward syslog. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Include custom pre-requisites if the connectivity requires - else delete customs"", ""description"": ""Description for any custom pre-requisite""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20vCenter/Data%20Connectors/Connector_Syslog_vcenter.json","true" -"","Valence Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Valence%20Security","valencesecurityinc1673598943514","valence_sentinel_solution","2023-11-20","","","Valence Security","Partner","https://www.valencesecurity.com/","","domains","","","","","","","false","","false" -"ValenceAlert_CL","Valence Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Valence%20Security","valencesecurityinc1673598943514","valence_sentinel_solution","2023-11-20","","","Valence 
Security","Partner","https://www.valencesecurity.com/","","domains","ValenceSecurity","Valence Security","SaaS Security","Connects the Valence SaaS security platform Azure Log Analytics via the REST API interface.","[{""title"": ""Step 1 : Read the detailed documentation"", ""description"": ""The installation process is documented in great detail in [Valence Security's knowledge base](https://support.valencesecurity.com). The user should consult this documentation further to understand installation and debug of the integration.""}, {""title"": ""Step 2: Retrieve the workspace access credentials"", ""description"": ""The first installation step is to retrieve both your **Workspace ID** and **Primary Key** from the Microsoft Sentinel platform.\nCopy the values shown below and save them for configuration of the API log forwarder integration."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Step 3: Configure Sentinel integration on the Valence Security Platform"", ""description"": ""As a Valence Security Platform admin, go to the [configuration screen](https://app.valencesecurity.com/settings/configuration), click Connect in the SIEM Integration card, and choose Microsoft Sentinel. Paste the values from the previous step and click Connect. 
Valence will test the connection so when success is reported, the connection worked.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Valence%20Security/Data%20Connectors/ValenceSecurity.json","true" -"","Varonis Purview","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Varonis%20Purview","varonis","azure-sentinel-solution-varonispurview","2025-10-27","2025-10-01","","Varonis","Partner","https://www.varonis.com/resources/support","","domains","","","","","","","false","","false" -"varonisresources_CL","Varonis Purview","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Varonis%20Purview","varonis","azure-sentinel-solution-varonispurview","2025-10-27","2025-10-01","","Varonis","Partner","https://www.varonis.com/resources/support","","domains","VaronisPurviewPush","Varonis","Varonis Purview Push Connector","The [Varonis Purview](https://www.varonis.com/) connector provides the capability to sync resources from Varonis to Microsoft Purview.","[{""title"": ""1. 
Run this to set up ingestion for Varonis Resources"", ""description"": ""This will create the necessary Log Analytics tables, Data Collection Rule (DCR), and an Entra application to securely send data to the DCR."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated Configuration and Secure Data Ingestion with Entra Application \nClicking on \""Deploy\"" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR). \nIt will then create an Entra application, link the DCR to it, and set the entered secret in the application. This setup enables data to be sent securely to the DCR using an Entra token.""}}, {""parameters"": {""label"": ""Deploy Varonis connector resources"", ""applicationDisplayName"": ""Varonis Purview Connector Application""}, ""type"": ""DeployPushConnectorButton""}]}, {""title"": ""2. Push your logs into the workspace"", ""description"": ""Use the following parameters to configure the Varonis Purview Connector in your Varonis integrations dashboard."", ""instructions"": [{""parameters"": {""label"": ""Tenant ID (Directory ID)"", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the App Registration Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the App Registration Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Endpoint Uri"", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the Data Collection Endpoint Uri""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Rule Immutable ID"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to 
get the Data Collection Rule Immutable ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Resources Stream Name"", ""value"": ""Custom-varonisresources""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rule (DCR). Typically requires Azure RBAC Owner or User Access Administrator role""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Varonis%20Purview/Data%20Connectors/VaronisPurview_ccp/VaronisPurview_connectorDefinition.json","true" -"","VaronisSaaS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VaronisSaaS","varonis","microsoft-sentinel-solution-varonissaas","2023-11-10","2023-11-10","","Varonis","Partner","https://www.varonis.com/resources/support","","domains","","","","","","","false","","false" -"VaronisAlerts_CL","VaronisSaaS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VaronisSaaS","varonis","microsoft-sentinel-solution-varonissaas","2023-11-10","2023-11-10","","Varonis","Partner","https://www.varonis.com/resources/support","","domains","VaronisSaaS","Varonis","Varonis SaaS","Varonis SaaS provides the capability to ingest [Varonis Alerts](https://www.varonis.com/products/datalert) into Microsoft Sentinel.

Varonis prioritizes deep data visibility, classification capabilities, and automated remediation for data access. Varonis builds a single prioritized view of risk for your data, so you can proactively and systematically eliminate risk from insider threats and cyberattacks.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Varonis DatAlert service to pull alerts into Microsoft Sentinel. This might result in additional data ingestion costs. See the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**For Azure function and related services installation use:**\n\n [![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVaronisSaaS%2FData%2520Connectors%2Fazuredeploy.json)""}, {""title"": """", ""description"": ""STEP 1 - Obtain the Varonis DatAlert Endpoint API credentials.\n\n To generate the Client ID and API key:\n 1. Launch the Varonis Web Interface.\n 2. Navigate to Configuration -> API Keys. The API Keys page is displayed.\n 3. Click Create API Key. The Add New API Key settings are displayed on the right.\n 4. Fill in the name and description.\n 5. Click the Generate Key button.\n 6. Copy the API key secret and save it in a handy location. 
You won't be able to copy it again.\n\nFor additional information, please check: [Varonis Documentation](https://help.varonis.com/s/document-item?bundleId=ami1661784208197&topicId=emp1703144742927.html&_LANG=enus)""}, {""title"": """", ""description"": ""STEP 2 - Deploy the connector and the associated Azure Function."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""Use this method for automated deployment of the data connector using an ARM Template.\n\n1. Click the Deploy to Azure button. \n\n\t[![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVaronisSaaS%2FData%2520Connectors%2Fazuredeploy.json)\n2. Select the preferred Subscription, Resource Group, Region, Storage Account Type.\n3. Enter Log Analytics Workspace Name, Varonis FQDN, Varonis SaaS API Key.\n4. Click Review + Create, Create.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VaronisSaaS/Data%20Connectors/VaronisSaaS_API_FunctionApp.json","true" -"","Vectra AI Detect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Detect","vectraaiinc","ai_vectra_detect_mss","2022-05-24","2023-04-17","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","","","","","","","false","","false" -"CommonSecurityLog","Vectra AI Detect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Detect","vectraaiinc","ai_vectra_detect_mss","2022-05-24","2023-04-17","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","AIVectraDetect","Vectra AI","[Deprecated] Vectra AI Detect via Legacy Agent","The AI Vectra Detect connector allows users to connect Vectra Detect logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives users more insight into their organization's network and improves their security operation capabilities.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 over TCP, UDP or TLS.\n\n> 1. Make sure that you have Python on your machine using the following command: python --version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward AI Vectra Detect logs to Syslog agent in CEF format"", ""description"": ""Configure Vectra (X Series) Agent to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent.\n\nFrom the Vectra UI, navigate to Settings > Notifications and Edit Syslog configuration. 
Follow below instructions to set up the connection:\n\n- Add a new Destination (which is the host where the Microsoft Sentinel Syslog Agent is running)\n\n- Set the Port as **514**\n\n- Set the Protocol as **UDP**\n\n- Set the format to **CEF**\n\n- Set Log types (Select all log types available)\n\n- Click on **Save**\n\nUser can click the **Test** button to force send some test events.\n\n For more information, refer to Cognito Detect Syslog Guide which can be downloaded from the resource page in Detect UI.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python --version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Detect/Data%20Connectors/AIVectraDetect.json","true" -"CommonSecurityLog","Vectra AI Detect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Detect","vectraaiinc","ai_vectra_detect_mss","2022-05-24","2023-04-17","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","AIVectraDetectAma","Vectra AI","[Deprecated] Vectra AI Detect via AMA","The AI Vectra Detect connector allows users to connect Vectra Detect logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives users more insight into their organization's network and improves their security operation capabilities.","[{""title"": """", ""description"": """", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. 
Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine""}, {""title"": ""Step B. Forward AI Vectra Detect logs to Syslog agent in CEF format"", ""description"": ""Configure Vectra (X Series) Agent to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent.\n\nFrom the Vectra UI, navigate to Settings > Notifications and Edit Syslog configuration. Follow below instructions to set up the connection:\n\n- Add a new Destination (which is the host where the Microsoft Sentinel Syslog Agent is running)\n\n- Set the Port as **514**\n\n- Set the Protocol as **UDP**\n\n- Set the format to **CEF**\n\n- Set Log types (Select all log types available)\n\n- Click on **Save**\n\nUser can click the **Test** button to force send some test events.\n\n For more information, refer to Cognito Detect Syslog Guide which can be downloaded from the ressource page in Detect UI.""}, {""title"": ""Step C. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Detect/Data%20Connectors/template_AIVectraDetectAma.json","true" -"","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","","","","","","","false","","false" -"VectraStream","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","AIVectraStream","Vectra AI","AI Vectra Stream via Legacy Agent","The AI Vectra Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected **VectraStream** which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Install the Linux agent on sperate Linux instance.\n\n> Logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Follow the configuration steps below to get Vectra Stream metadata into Microsoft Sentinel. The Log Analytics agent is leveraged to send custom JSON into Azure Monitor, enabling the storage of the metadata into a custom table. For more information, refer to the [Azure Monitor Documentation](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-json).\n1. Download config file for the log analytics agent: VectraStream.conf (located in the Connector folder within the Vectra solution: https://aka.ms/sentinel-aivectrastream-conf).\n2. Login to the server where you have installed Azure Log Analytics agent.\n3. Copy VectraStream.conf to the /etc/opt/microsoft/omsagent/**workspace_id**/conf/omsagent.d/ folder.\n4. Edit VectraStream.conf as follows:\n\n\t i. configure an alternate port to send data to, if desired. Default port is 29009.\n\n\t ii. replace **workspace_id** with real value of your Workspace ID.\n5. 
Save changes and restart the Azure Log Analytics agent for Linux service with the following command:\n\t\tsudo /opt/microsoft/omsagent/bin/service_control restart"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Configure and connect Vectra AI Stream"", ""description"": ""Configure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via the Log Analytics Agent.\n\nFrom the Vectra UI, navigate to Settings > Cognito Stream and Edit the destination configuration:\n\n- Select Publisher: RAW JSON\n\n- Set the server IP or hostname (which is the host which run the Log Analytics Agent)\n\n- Set all the port to **29009** (this port can be modified if required)\n\n- Save\n\n- Set Log types (Select all log types available)\n\n- Click on **Save**\n\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Brain"", ""description"": ""must be configured to export Stream metadata in JSON""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/Connector_VectraAI_Stream.json","true" -"VectraStream_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","AIVectraStream","Vectra AI","AI Vectra Stream via Legacy Agent","The AI Vectra Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected **VectraStream** which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Install the Linux agent on sperate Linux instance.\n\n> Logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Follow the configuration steps below to get Vectra Stream metadata into Microsoft Sentinel. The Log Analytics agent is leveraged to send custom JSON into Azure Monitor, enabling the storage of the metadata into a custom table. For more information, refer to the [Azure Monitor Documentation](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-json).\n1. Download config file for the log analytics agent: VectraStream.conf (located in the Connector folder within the Vectra solution: https://aka.ms/sentinel-aivectrastream-conf).\n2. Login to the server where you have installed Azure Log Analytics agent.\n3. Copy VectraStream.conf to the /etc/opt/microsoft/omsagent/**workspace_id**/conf/omsagent.d/ folder.\n4. Edit VectraStream.conf as follows:\n\n\t i. configure an alternate port to send data to, if desired. Default port is 29009.\n\n\t ii. replace **workspace_id** with real value of your Workspace ID.\n5. 
Save changes and restart the Azure Log Analytics agent for Linux service with the following command:\n\t\tsudo /opt/microsoft/omsagent/bin/service_control restart"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Configure and connect Vectra AI Stream"", ""description"": ""Configure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via the Log Analytics Agent.\n\nFrom the Vectra UI, navigate to Settings > Cognito Stream and Edit the destination configuration:\n\n- Select Publisher: RAW JSON\n\n- Set the server IP or hostname (which is the host which run the Log Analytics Agent)\n\n- Set all the port to **29009** (this port can be modified if required)\n\n- Save\n\n- Set Log types (Select all log types available)\n\n- Click on **Save**\n\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Brain"", ""description"": ""must be configured to export Stream metadata in JSON""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/Connector_VectraAI_Stream.json","true" -"vectra_beacon_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been install yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' buton on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are goning to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to sent the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR than you created (Facilily is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your wokrspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host whhere AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" -"vectra_dcerpc_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been install yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' buton on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are goning to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to sent the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR than you created (Facilily is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your wokrspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host whhere AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" -"vectra_dhcp_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been install yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' buton on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are goning to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to sent the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR than you created (Facilily is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your wokrspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host whhere AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" -"vectra_dns_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been install yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' buton on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are goning to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to sent the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR than you created (Facilily is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your wokrspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host whhere AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" -"vectra_http_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been install yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' buton on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are goning to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to sent the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR than you created (Facilily is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your wokrspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host whhere AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" -"vectra_isession_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been install yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' buton on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are goning to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to sent the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR than you created (Facilily is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your wokrspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host whhere AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" -"vectra_kerberos_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been install yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' buton on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are goning to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to sent the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR than you created (Facilily is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your wokrspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host whhere AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" -"vectra_ldap_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been install yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' buton on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are goning to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to sent the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR than you created (Facilily is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your wokrspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host whhere AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" -"vectra_ntlm_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been install yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' buton on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are goning to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to sent the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR than you created (Facilily is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your wokrspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host whhere AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" -"vectra_radius_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been install yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' buton on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are goning to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to sent the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR than you created (Facilily is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your wokrspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host whhere AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" -"vectra_rdp_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been installed yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' button on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check if there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are goning to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to sent the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR than you created (Facilily is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your wokrspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host whhere AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" -"vectra_smbfiles_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been install yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' buton on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are goning to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to sent the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR than you created (Facilily is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your wokrspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host whhere AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" -"vectra_smbmapping_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been install yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' buton on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are goning to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to sent the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR than you created (Facilily is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your wokrspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host whhere AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" -"vectra_smtp_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been install yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' buton on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are goning to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to sent the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR that you created (Facility is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your workspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host where AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" -"vectra_ssh_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been installed yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' button on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check if there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are going to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to send the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR that you created (Facility is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your workspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host where AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" -"vectra_ssl_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been installed yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' button on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check if there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are going to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to send the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR that you created (Facility is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your workspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host where AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" -"vectra_x509_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been installed yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' button on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check if there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are going to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to send the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR that you created (Facility is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your workspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host where AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" -"","Vectra XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR","vectraaiinc","vectra-xdr-for-microsoft-sentinel","2023-07-04","2024-08-01","","Vectra Support","Partner","https://www.vectra.ai/support","","domains","","","","","","","false","","false" -"Audits_Data_CL","Vectra XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR","vectraaiinc","vectra-xdr-for-microsoft-sentinel","2023-07-04","2024-08-01","","Vectra Support","Partner","https://www.vectra.ai/support","","domains","VectraXDR","Vectra","Vectra XDR","The [Vectra XDR](https://www.vectra.ai/) connector gives the capability to ingest Vectra Detections, Audits, Entity Scoring, Lockdown, Health and Entities data into Microsoft Sentinel through the Vectra REST API. Refer to the API documentation: `https://support.vectra.ai/s/article/KB-VS-1666` for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Vectra API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. Follow these steps for [Detections Parser](https://aka.ms/sentinel-VectraDetections-parser), [Audits Parser](https://aka.ms/sentinel-VectraAudits-parser), [Entity Scoring Parser](https://aka.ms/sentinel-VectraEntityScoring-parser), [Lockdown Parser](https://aka.ms/sentinel-VectraLockdown-parser) and [Health Parser](https://aka.ms/sentinel-VectraHealth-parser) to create the Kusto functions alias, **VectraDetections**, **VectraAudits**, **VectraEntityScoring**, **VectraLockdown** and **VectraHealth**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Vectra API Credentials**\n\n Follow these instructions to create a Vectra Client ID and Client Secret.\n 1. Log into your Vectra portal\n 2. Navigate to Manage -> API Clients\n 3. From the API Clients page, select 'Add API Client' to create a new client.\n 4. Add Client Name, select Role and click on Generate Credentials to obtain your client credentials. \n 5. Be sure to record your Client ID and Secret Key for safekeeping. You will need these two pieces of information to obtain an access token from the Vectra API. An access token is required to make requests to all of the Vectra API endpoints.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. 
Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Vectra Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Vectra Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Vectra Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. 
Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Create a Keyvault**\n\n Follow these instructions to create a new Keyvault.\n 1. In the Azure portal, Go to **Key vaults** and click on Create.\n 2. Select Subsciption, Resource Group and provide unique name of keyvault.""}, {""title"": """", ""description"": ""**STEP 7 - Create Access Policy in Keyvault**\n\n Follow these instructions to create access policy in Keyvault.\n 1. Go to keyvaults, select your keyvault, go to Access policies on left side panel, click on create.\n 2. Select all keys & secrets permissions. Click next.\n 3. In the principal section, search by application name which was generated in STEP - 2. 
Click next.\n\n **Note: **Ensure the Permission model in the Access Configuration of Key Vault is set to **'Vault access policy'**""}, {""title"": """", ""description"": ""**STEP 8 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Vectra data connector, have the Vectra API Authorization Credentials readily available..""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Vectra connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace Name \n\t\tVectra Base URL (https://) \n\t\tVectra Client Id - Health \n\t\tVectra Client Secret Key - Health \n\t\tVectra Client Id - Entity Scoring \n\t\tVectra Client Secret - Entity Scoring \n\t\tVectra Client Id - Detections \n\t\tVectra Client Secret - Detections \n\t\tVectra Client Id - Audits \n\t\tVectra Client Secret - Audits \n\t\tVectra Client Id - Lockdown \n\t\tVectra Client Secret - Lockdown \n\t\tVectra Client Id - Host-Entity \n\t\tVectra Client Secret - Host-Entity \n\t\tVectra Client Id - Account-Entity \n\t\tVectra Client Secret - Account-Entity \n\t\tKey Vault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tAzure Entra ObjectID \n\t\tStartTime (in MM/DD/YYYY HH:MM:SS Format) \n\t\tInclude Score Decrease \n\t\tAudits Table Name \n\t\tDetections Table Name \n\t\tEntity Scoring Table Name \n\t\tLockdown Table Name \n\t\tHealth Table Name \n\t\tEntities Table Name \n\t\tExclude Group Details From Detections\n\t\tLog Level 
(Default: INFO) \n\t\tLockdown Schedule \n\t\tHealth Schedule \n\t\tDetections Schedule \n\t\tAudits Schedule \n\t\tEntity Scoring Schedule \n\t\tEntities Schedule \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Vectra data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-VectraXDR320-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. VECTRAXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. 
Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tVectra Base URL (https://) \n\t\tVectra Client Id - Health \n\t\tVectra Client Secret Key - Health \n\t\tVectra Client Id - Entity Scoring \n\t\tVectra Client Secret - Entity Scoring \n\t\tVectra Client Id - Detections \n\t\tVectra Client Secret - Detections \n\t\tVectra Client Id - Audits \n\t\tVectra Client Secret - Audits \n\t\tVectra Client Id - Lockdown \n\t\tVectra Client Secret - Lockdown \n\t\tVectra Client Id - Host-Entity \n\t\tVectra Client Secret - Host-Entity \n\t\tVectra Client Id - Account-Entity \n\t\tVectra Client Secret - Account-Entity \n\t\tKey Vault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tStartTime (in MM/DD/YYYY HH:MM:SS Format) \n\t\tInclude Score Decrease \n\t\tAudits Table Name \n\t\tDetections Table Name \n\t\tEntity Scoring Table Name \n\t\tLockdown Table Name \n\t\tHealth Table Name \n\t\tEntities Table Name \n\t\tLog Level (Default: INFO) \n\t\tLockdown Schedule \n\t\tHealth Schedule \n\t\tDetections Schedule \n\t\tAudits Schedule \n\t\tEntity Scoring Schedule \n\t\tEntities Schedule \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. 
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Vectra Client ID** and **Client Secret** is required for Health, Entity Scoring, Entities, Detections, Lockdown and Audit data collection. 
See the documentation to learn more about API on the `https://support.vectra.ai/s/article/KB-VS-1666`.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR/Data%20Connectors/VectraDataConnector/VectraXDR_API_FunctionApp.json","true" -"Detections_Data_CL","Vectra XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR","vectraaiinc","vectra-xdr-for-microsoft-sentinel","2023-07-04","2024-08-01","","Vectra Support","Partner","https://www.vectra.ai/support","","domains","VectraXDR","Vectra","Vectra XDR","The [Vectra XDR](https://www.vectra.ai/) connector gives the capability to ingest Vectra Detections, Audits, Entity Scoring, Lockdown, Health and Entities data into Microsoft Sentinel through the Vectra REST API. Refer to the API documentation: `https://support.vectra.ai/s/article/KB-VS-1666` for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Vectra API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. 
Follow these steps for [Detections Parser](https://aka.ms/sentinel-VectraDetections-parser), [Audits Parser](https://aka.ms/sentinel-VectraAudits-parser), [Entity Scoring Parser](https://aka.ms/sentinel-VectraEntityScoring-parser), [Lockdown Parser](https://aka.ms/sentinel-VectraLockdown-parser) and [Health Parser](https://aka.ms/sentinel-VectraHealth-parser) to create the Kusto functions alias, **VectraDetections**, **VectraAudits**, **VectraEntityScoring**, **VectraLockdown** and **VectraHealth**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Vectra API Credentials**\n\n Follow these instructions to create a Vectra Client ID and Client Secret.\n 1. Log into your Vectra portal\n 2. Navigate to Manage -> API Clients\n 3. From the API Clients page, select 'Add API Client' to create a new client.\n 4. Add Client Name, select Role and click on Generate Credentials to obtain your client credentials. \n 5. Be sure to record your Client ID and Secret Key for safekeeping. You will need these two pieces of information to obtain an access token from the Vectra API. An access token is required to make requests to all of the Vectra API endpoints.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. 
The client ID and Tenant ID is required as configuration parameters for the execution of Vectra Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Vectra Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Vectra Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Create a Keyvault**\n\n Follow these instructions to create a new Keyvault.\n 1. In the Azure portal, Go to **Key vaults** and click on Create.\n 2. Select Subsciption, Resource Group and provide unique name of keyvault.""}, {""title"": """", ""description"": ""**STEP 7 - Create Access Policy in Keyvault**\n\n Follow these instructions to create access policy in Keyvault.\n 1. Go to keyvaults, select your keyvault, go to Access policies on left side panel, click on create.\n 2. Select all keys & secrets permissions. Click next.\n 3. In the principal section, search by application name which was generated in STEP - 2. 
Click next.\n\n **Note: **Ensure the Permission model in the Access Configuration of Key Vault is set to **'Vault access policy'**""}, {""title"": """", ""description"": ""**STEP 8 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Vectra data connector, have the Vectra API Authorization Credentials readily available..""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Vectra connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace Name \n\t\tVectra Base URL (https://) \n\t\tVectra Client Id - Health \n\t\tVectra Client Secret Key - Health \n\t\tVectra Client Id - Entity Scoring \n\t\tVectra Client Secret - Entity Scoring \n\t\tVectra Client Id - Detections \n\t\tVectra Client Secret - Detections \n\t\tVectra Client Id - Audits \n\t\tVectra Client Secret - Audits \n\t\tVectra Client Id - Lockdown \n\t\tVectra Client Secret - Lockdown \n\t\tVectra Client Id - Host-Entity \n\t\tVectra Client Secret - Host-Entity \n\t\tVectra Client Id - Account-Entity \n\t\tVectra Client Secret - Account-Entity \n\t\tKey Vault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tAzure Entra ObjectID \n\t\tStartTime (in MM/DD/YYYY HH:MM:SS Format) \n\t\tInclude Score Decrease \n\t\tAudits Table Name \n\t\tDetections Table Name \n\t\tEntity Scoring Table Name \n\t\tLockdown Table Name \n\t\tHealth Table Name \n\t\tEntities Table Name \n\t\tExclude Group Details From Detections\n\t\tLog Level 
(Default: INFO) \n\t\tLockdown Schedule \n\t\tHealth Schedule \n\t\tDetections Schedule \n\t\tAudits Schedule \n\t\tEntity Scoring Schedule \n\t\tEntities Schedule \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Vectra data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-VectraXDR320-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. VECTRAXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. 
Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tVectra Base URL (https://) \n\t\tVectra Client Id - Health \n\t\tVectra Client Secret Key - Health \n\t\tVectra Client Id - Entity Scoring \n\t\tVectra Client Secret - Entity Scoring \n\t\tVectra Client Id - Detections \n\t\tVectra Client Secret - Detections \n\t\tVectra Client Id - Audits \n\t\tVectra Client Secret - Audits \n\t\tVectra Client Id - Lockdown \n\t\tVectra Client Secret - Lockdown \n\t\tVectra Client Id - Host-Entity \n\t\tVectra Client Secret - Host-Entity \n\t\tVectra Client Id - Account-Entity \n\t\tVectra Client Secret - Account-Entity \n\t\tKey Vault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tStartTime (in MM/DD/YYYY HH:MM:SS Format) \n\t\tInclude Score Decrease \n\t\tAudits Table Name \n\t\tDetections Table Name \n\t\tEntity Scoring Table Name \n\t\tLockdown Table Name \n\t\tHealth Table Name \n\t\tEntities Table Name \n\t\tLog Level (Default: INFO) \n\t\tLockdown Schedule \n\t\tHealth Schedule \n\t\tDetections Schedule \n\t\tAudits Schedule \n\t\tEntity Scoring Schedule \n\t\tEntities Schedule \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. 
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Vectra Client ID** and **Client Secret** is required for Health, Entity Scoring, Entities, Detections, Lockdown and Audit data collection. 
See the documentation to learn more about API on the `https://support.vectra.ai/s/article/KB-VS-1666`.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR/Data%20Connectors/VectraDataConnector/VectraXDR_API_FunctionApp.json","true" -"Entities_Data_CL","Vectra XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR","vectraaiinc","vectra-xdr-for-microsoft-sentinel","2023-07-04","2024-08-01","","Vectra Support","Partner","https://www.vectra.ai/support","","domains","VectraXDR","Vectra","Vectra XDR","The [Vectra XDR](https://www.vectra.ai/) connector gives the capability to ingest Vectra Detections, Audits, Entity Scoring, Lockdown, Health and Entities data into Microsoft Sentinel through the Vectra REST API. Refer to the API documentation: `https://support.vectra.ai/s/article/KB-VS-1666` for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Vectra API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. 
Follow these steps for [Detections Parser](https://aka.ms/sentinel-VectraDetections-parser), [Audits Parser](https://aka.ms/sentinel-VectraAudits-parser), [Entity Scoring Parser](https://aka.ms/sentinel-VectraEntityScoring-parser), [Lockdown Parser](https://aka.ms/sentinel-VectraLockdown-parser) and [Health Parser](https://aka.ms/sentinel-VectraHealth-parser) to create the Kusto functions alias, **VectraDetections**, **VectraAudits**, **VectraEntityScoring**, **VectraLockdown** and **VectraHealth**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Vectra API Credentials**\n\n Follow these instructions to create a Vectra Client ID and Client Secret.\n 1. Log into your Vectra portal\n 2. Navigate to Manage -> API Clients\n 3. From the API Clients page, select 'Add API Client' to create a new client.\n 4. Add Client Name, select Role and click on Generate Credentials to obtain your client credentials. \n 5. Be sure to record your Client ID and Secret Key for safekeeping. You will need these two pieces of information to obtain an access token from the Vectra API. An access token is required to make requests to all of the Vectra API endpoints.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. 
The client ID and Tenant ID is required as configuration parameters for the execution of Vectra Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Vectra Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Vectra Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Create a Keyvault**\n\n Follow these instructions to create a new Keyvault.\n 1. In the Azure portal, Go to **Key vaults** and click on Create.\n 2. Select Subsciption, Resource Group and provide unique name of keyvault.""}, {""title"": """", ""description"": ""**STEP 7 - Create Access Policy in Keyvault**\n\n Follow these instructions to create access policy in Keyvault.\n 1. Go to keyvaults, select your keyvault, go to Access policies on left side panel, click on create.\n 2. Select all keys & secrets permissions. Click next.\n 3. In the principal section, search by application name which was generated in STEP - 2. 
Click next.\n\n **Note:** Ensure the Permission model in the Access Configuration of Key Vault is set to **'Vault access policy'**""}, {""title"": """", ""description"": ""**STEP 8 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Vectra data connector, have the Vectra API Authorization Credentials readily available.""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Vectra connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace Name \n\t\tVectra Base URL (https://) \n\t\tVectra Client Id - Health \n\t\tVectra Client Secret Key - Health \n\t\tVectra Client Id - Entity Scoring \n\t\tVectra Client Secret - Entity Scoring \n\t\tVectra Client Id - Detections \n\t\tVectra Client Secret - Detections \n\t\tVectra Client Id - Audits \n\t\tVectra Client Secret - Audits \n\t\tVectra Client Id - Lockdown \n\t\tVectra Client Secret - Lockdown \n\t\tVectra Client Id - Host-Entity \n\t\tVectra Client Secret - Host-Entity \n\t\tVectra Client Id - Account-Entity \n\t\tVectra Client Secret - Account-Entity \n\t\tKey Vault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tAzure Entra ObjectID \n\t\tStartTime (in MM/DD/YYYY HH:MM:SS Format) \n\t\tInclude Score Decrease \n\t\tAudits Table Name \n\t\tDetections Table Name \n\t\tEntity Scoring Table Name \n\t\tLockdown Table Name \n\t\tHealth Table Name \n\t\tEntities Table Name \n\t\tExclude Group Details From Detections\n\t\tLog Level 
(Default: INFO) \n\t\tLockdown Schedule \n\t\tHealth Schedule \n\t\tDetections Schedule \n\t\tAudits Schedule \n\t\tEntity Scoring Schedule \n\t\tEntities Schedule \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Vectra data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-VectraXDR320-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. VECTRAXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. 
Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tVectra Base URL (https://) \n\t\tVectra Client Id - Health \n\t\tVectra Client Secret Key - Health \n\t\tVectra Client Id - Entity Scoring \n\t\tVectra Client Secret - Entity Scoring \n\t\tVectra Client Id - Detections \n\t\tVectra Client Secret - Detections \n\t\tVectra Client Id - Audits \n\t\tVectra Client Secret - Audits \n\t\tVectra Client Id - Lockdown \n\t\tVectra Client Secret - Lockdown \n\t\tVectra Client Id - Host-Entity \n\t\tVectra Client Secret - Host-Entity \n\t\tVectra Client Id - Account-Entity \n\t\tVectra Client Secret - Account-Entity \n\t\tKey Vault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tStartTime (in MM/DD/YYYY HH:MM:SS Format) \n\t\tInclude Score Decrease \n\t\tAudits Table Name \n\t\tDetections Table Name \n\t\tEntity Scoring Table Name \n\t\tLockdown Table Name \n\t\tHealth Table Name \n\t\tEntities Table Name \n\t\tLog Level (Default: INFO) \n\t\tLockdown Schedule \n\t\tHealth Schedule \n\t\tDetections Schedule \n\t\tAudits Schedule \n\t\tEntity Scoring Schedule \n\t\tEntities Schedule \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. 
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Vectra Client ID** and **Client Secret** is required for Health, Entity Scoring, Entities, Detections, Lockdown and Audit data collection. 
See the documentation to learn more about API on the `https://support.vectra.ai/s/article/KB-VS-1666`.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR/Data%20Connectors/VectraDataConnector/VectraXDR_API_FunctionApp.json","true" -"Entity_Scoring_Data_CL","Vectra XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR","vectraaiinc","vectra-xdr-for-microsoft-sentinel","2023-07-04","2024-08-01","","Vectra Support","Partner","https://www.vectra.ai/support","","domains","VectraXDR","Vectra","Vectra XDR","The [Vectra XDR](https://www.vectra.ai/) connector gives the capability to ingest Vectra Detections, Audits, Entity Scoring, Lockdown, Health and Entities data into Microsoft Sentinel through the Vectra REST API. Refer to the API documentation: `https://support.vectra.ai/s/article/KB-VS-1666` for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Vectra API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. 
Follow these steps for [Detections Parser](https://aka.ms/sentinel-VectraDetections-parser), [Audits Parser](https://aka.ms/sentinel-VectraAudits-parser), [Entity Scoring Parser](https://aka.ms/sentinel-VectraEntityScoring-parser), [Lockdown Parser](https://aka.ms/sentinel-VectraLockdown-parser) and [Health Parser](https://aka.ms/sentinel-VectraHealth-parser) to create the Kusto functions alias, **VectraDetections**, **VectraAudits**, **VectraEntityScoring**, **VectraLockdown** and **VectraHealth**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Vectra API Credentials**\n\n Follow these instructions to create a Vectra Client ID and Client Secret.\n 1. Log into your Vectra portal\n 2. Navigate to Manage -> API Clients\n 3. From the API Clients page, select 'Add API Client' to create a new client.\n 4. Add Client Name, select Role and click on Generate Credentials to obtain your client credentials. \n 5. Be sure to record your Client ID and Secret Key for safekeeping. You will need these two pieces of information to obtain an access token from the Vectra API. An access token is required to make requests to all of the Vectra API endpoints.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. 
The client ID and Tenant ID is required as configuration parameters for the execution of Vectra Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Vectra Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Vectra Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Create a Keyvault**\n\n Follow these instructions to create a new Keyvault.\n 1. In the Azure portal, Go to **Key vaults** and click on Create.\n 2. Select Subscription, Resource Group and provide unique name of keyvault.""}, {""title"": """", ""description"": ""**STEP 7 - Create Access Policy in Keyvault**\n\n Follow these instructions to create access policy in Keyvault.\n 1. Go to keyvaults, select your keyvault, go to Access policies on left side panel, click on create.\n 2. Select all keys & secrets permissions. Click next.\n 3. In the principal section, search by application name which was generated in STEP - 2. 
Click next.\n\n **Note:** Ensure the Permission model in the Access Configuration of Key Vault is set to **'Vault access policy'**""}, {""title"": """", ""description"": ""**STEP 8 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Vectra data connector, have the Vectra API Authorization Credentials readily available.""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Vectra connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace Name \n\t\tVectra Base URL (https://) \n\t\tVectra Client Id - Health \n\t\tVectra Client Secret Key - Health \n\t\tVectra Client Id - Entity Scoring \n\t\tVectra Client Secret - Entity Scoring \n\t\tVectra Client Id - Detections \n\t\tVectra Client Secret - Detections \n\t\tVectra Client Id - Audits \n\t\tVectra Client Secret - Audits \n\t\tVectra Client Id - Lockdown \n\t\tVectra Client Secret - Lockdown \n\t\tVectra Client Id - Host-Entity \n\t\tVectra Client Secret - Host-Entity \n\t\tVectra Client Id - Account-Entity \n\t\tVectra Client Secret - Account-Entity \n\t\tKey Vault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tAzure Entra ObjectID \n\t\tStartTime (in MM/DD/YYYY HH:MM:SS Format) \n\t\tInclude Score Decrease \n\t\tAudits Table Name \n\t\tDetections Table Name \n\t\tEntity Scoring Table Name \n\t\tLockdown Table Name \n\t\tHealth Table Name \n\t\tEntities Table Name \n\t\tExclude Group Details From Detections\n\t\tLog Level 
(Default: INFO) \n\t\tLockdown Schedule \n\t\tHealth Schedule \n\t\tDetections Schedule \n\t\tAudits Schedule \n\t\tEntity Scoring Schedule \n\t\tEntities Schedule \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Vectra data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-VectraXDR320-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. VECTRAXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. 
Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tVectra Base URL (https://) \n\t\tVectra Client Id - Health \n\t\tVectra Client Secret Key - Health \n\t\tVectra Client Id - Entity Scoring \n\t\tVectra Client Secret - Entity Scoring \n\t\tVectra Client Id - Detections \n\t\tVectra Client Secret - Detections \n\t\tVectra Client Id - Audits \n\t\tVectra Client Secret - Audits \n\t\tVectra Client Id - Lockdown \n\t\tVectra Client Secret - Lockdown \n\t\tVectra Client Id - Host-Entity \n\t\tVectra Client Secret - Host-Entity \n\t\tVectra Client Id - Account-Entity \n\t\tVectra Client Secret - Account-Entity \n\t\tKey Vault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tStartTime (in MM/DD/YYYY HH:MM:SS Format) \n\t\tInclude Score Decrease \n\t\tAudits Table Name \n\t\tDetections Table Name \n\t\tEntity Scoring Table Name \n\t\tLockdown Table Name \n\t\tHealth Table Name \n\t\tEntities Table Name \n\t\tLog Level (Default: INFO) \n\t\tLockdown Schedule \n\t\tHealth Schedule \n\t\tDetections Schedule \n\t\tAudits Schedule \n\t\tEntity Scoring Schedule \n\t\tEntities Schedule \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. 
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Vectra Client ID** and **Client Secret** is required for Health, Entity Scoring, Entities, Detections, Lockdown and Audit data collection. 
See the documentation to learn more about API on the `https://support.vectra.ai/s/article/KB-VS-1666`.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR/Data%20Connectors/VectraDataConnector/VectraXDR_API_FunctionApp.json","true" -"Health_Data_CL","Vectra XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR","vectraaiinc","vectra-xdr-for-microsoft-sentinel","2023-07-04","2024-08-01","","Vectra Support","Partner","https://www.vectra.ai/support","","domains","VectraXDR","Vectra","Vectra XDR","The [Vectra XDR](https://www.vectra.ai/) connector gives the capability to ingest Vectra Detections, Audits, Entity Scoring, Lockdown, Health and Entities data into Microsoft Sentinel through the Vectra REST API. Refer to the API documentation: `https://support.vectra.ai/s/article/KB-VS-1666` for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Vectra API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. 
Follow these steps for [Detections Parser](https://aka.ms/sentinel-VectraDetections-parser), [Audits Parser](https://aka.ms/sentinel-VectraAudits-parser), [Entity Scoring Parser](https://aka.ms/sentinel-VectraEntityScoring-parser), [Lockdown Parser](https://aka.ms/sentinel-VectraLockdown-parser) and [Health Parser](https://aka.ms/sentinel-VectraHealth-parser) to create the Kusto functions alias, **VectraDetections**, **VectraAudits**, **VectraEntityScoring**, **VectraLockdown** and **VectraHealth**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Vectra API Credentials**\n\n Follow these instructions to create a Vectra Client ID and Client Secret.\n 1. Log into your Vectra portal\n 2. Navigate to Manage -> API Clients\n 3. From the API Clients page, select 'Add API Client' to create a new client.\n 4. Add Client Name, select Role and click on Generate Credentials to obtain your client credentials. \n 5. Be sure to record your Client ID and Secret Key for safekeeping. You will need these two pieces of information to obtain an access token from the Vectra API. An access token is required to make requests to all of the Vectra API endpoints.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. 
The client ID and Tenant ID is required as configuration parameters for the execution of Vectra Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Vectra Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Vectra Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Create a Keyvault**\n\n Follow these instructions to create a new Keyvault.\n 1. In the Azure portal, Go to **Key vaults** and click on Create.\n 2. Select Subscription, Resource Group and provide unique name of keyvault.""}, {""title"": """", ""description"": ""**STEP 7 - Create Access Policy in Keyvault**\n\n Follow these instructions to create access policy in Keyvault.\n 1. Go to keyvaults, select your keyvault, go to Access policies on left side panel, click on create.\n 2. Select all keys & secrets permissions. Click next.\n 3. In the principal section, search by application name which was generated in STEP - 2. 
Click next.\n\n **Note:** Ensure the Permission model in the Access Configuration of Key Vault is set to **'Vault access policy'**""}, {""title"": """", ""description"": ""**STEP 8 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Vectra data connector, have the Vectra API Authorization Credentials readily available.""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Vectra connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace Name \n\t\tVectra Base URL (https://) \n\t\tVectra Client Id - Health \n\t\tVectra Client Secret Key - Health \n\t\tVectra Client Id - Entity Scoring \n\t\tVectra Client Secret - Entity Scoring \n\t\tVectra Client Id - Detections \n\t\tVectra Client Secret - Detections \n\t\tVectra Client Id - Audits \n\t\tVectra Client Secret - Audits \n\t\tVectra Client Id - Lockdown \n\t\tVectra Client Secret - Lockdown \n\t\tVectra Client Id - Host-Entity \n\t\tVectra Client Secret - Host-Entity \n\t\tVectra Client Id - Account-Entity \n\t\tVectra Client Secret - Account-Entity \n\t\tKey Vault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tAzure Entra ObjectID \n\t\tStartTime (in MM/DD/YYYY HH:MM:SS Format) \n\t\tInclude Score Decrease \n\t\tAudits Table Name \n\t\tDetections Table Name \n\t\tEntity Scoring Table Name \n\t\tLockdown Table Name \n\t\tHealth Table Name \n\t\tEntities Table Name \n\t\tExclude Group Details From Detections\n\t\tLog Level 
(Default: INFO) \n\t\tLockdown Schedule \n\t\tHealth Schedule \n\t\tDetections Schedule \n\t\tAudits Schedule \n\t\tEntity Scoring Schedule \n\t\tEntities Schedule \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Vectra data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-VectraXDR320-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. VECTRAXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. 
Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tVectra Base URL (https://) \n\t\tVectra Client Id - Health \n\t\tVectra Client Secret Key - Health \n\t\tVectra Client Id - Entity Scoring \n\t\tVectra Client Secret - Entity Scoring \n\t\tVectra Client Id - Detections \n\t\tVectra Client Secret - Detections \n\t\tVectra Client Id - Audits \n\t\tVectra Client Secret - Audits \n\t\tVectra Client Id - Lockdown \n\t\tVectra Client Secret - Lockdown \n\t\tVectra Client Id - Host-Entity \n\t\tVectra Client Secret - Host-Entity \n\t\tVectra Client Id - Account-Entity \n\t\tVectra Client Secret - Account-Entity \n\t\tKey Vault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tStartTime (in MM/DD/YYYY HH:MM:SS Format) \n\t\tInclude Score Decrease \n\t\tAudits Table Name \n\t\tDetections Table Name \n\t\tEntity Scoring Table Name \n\t\tLockdown Table Name \n\t\tHealth Table Name \n\t\tEntities Table Name \n\t\tLog Level (Default: INFO) \n\t\tLockdown Schedule \n\t\tHealth Schedule \n\t\tDetections Schedule \n\t\tAudits Schedule \n\t\tEntity Scoring Schedule \n\t\tEntities Schedule \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. 
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Vectra Client ID** and **Client Secret** is required for Health, Entity Scoring, Entities, Detections, Lockdown and Audit data collection. 
See the documentation to learn more about API on the `https://support.vectra.ai/s/article/KB-VS-1666`.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR/Data%20Connectors/VectraDataConnector/VectraXDR_API_FunctionApp.json","true" -"Lockdown_Data_CL","Vectra XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR","vectraaiinc","vectra-xdr-for-microsoft-sentinel","2023-07-04","2024-08-01","","Vectra Support","Partner","https://www.vectra.ai/support","","domains","VectraXDR","Vectra","Vectra XDR","The [Vectra XDR](https://www.vectra.ai/) connector gives the capability to ingest Vectra Detections, Audits, Entity Scoring, Lockdown, Health and Entities data into Microsoft Sentinel through the Vectra REST API. Refer to the API documentation: `https://support.vectra.ai/s/article/KB-VS-1666` for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Vectra API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. 
Follow these steps for [Detections Parser](https://aka.ms/sentinel-VectraDetections-parser), [Audits Parser](https://aka.ms/sentinel-VectraAudits-parser), [Entity Scoring Parser](https://aka.ms/sentinel-VectraEntityScoring-parser), [Lockdown Parser](https://aka.ms/sentinel-VectraLockdown-parser) and [Health Parser](https://aka.ms/sentinel-VectraHealth-parser) to create the Kusto functions alias, **VectraDetections**, **VectraAudits**, **VectraEntityScoring**, **VectraLockdown** and **VectraHealth**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Vectra API Credentials**\n\n Follow these instructions to create a Vectra Client ID and Client Secret.\n 1. Log into your Vectra portal\n 2. Navigate to Manage -> API Clients\n 3. From the API Clients page, select 'Add API Client' to create a new client.\n 4. Add Client Name, select Role and click on Generate Credentials to obtain your client credentials. \n 5. Be sure to record your Client ID and Secret Key for safekeeping. You will need these two pieces of information to obtain an access token from the Vectra API. An access token is required to make requests to all of the Vectra API endpoints.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. 
The client ID and Tenant ID is required as configuration parameters for the execution of Vectra Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Vectra Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Vectra Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Create a Keyvault**\n\n Follow these instructions to create a new Keyvault.\n 1. In the Azure portal, Go to **Key vaults** and click on Create.\n 2. Select Subscription, Resource Group and provide unique name of keyvault.""}, {""title"": """", ""description"": ""**STEP 7 - Create Access Policy in Keyvault**\n\n Follow these instructions to create access policy in Keyvault.\n 1. Go to keyvaults, select your keyvault, go to Access policies on left side panel, click on create.\n 2. Select all keys & secrets permissions. Click next.\n 3. In the principal section, search by application name which was generated in STEP - 2. 
Click next.\n\n **Note:** Ensure the Permission model in the Access Configuration of Key Vault is set to **'Vault access policy'**""}, {""title"": """", ""description"": ""**STEP 8 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Vectra data connector, have the Vectra API Authorization Credentials readily available.""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Vectra connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace Name \n\t\tVectra Base URL (https://) \n\t\tVectra Client Id - Health \n\t\tVectra Client Secret Key - Health \n\t\tVectra Client Id - Entity Scoring \n\t\tVectra Client Secret - Entity Scoring \n\t\tVectra Client Id - Detections \n\t\tVectra Client Secret - Detections \n\t\tVectra Client Id - Audits \n\t\tVectra Client Secret - Audits \n\t\tVectra Client Id - Lockdown \n\t\tVectra Client Secret - Lockdown \n\t\tVectra Client Id - Host-Entity \n\t\tVectra Client Secret - Host-Entity \n\t\tVectra Client Id - Account-Entity \n\t\tVectra Client Secret - Account-Entity \n\t\tKey Vault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tAzure Entra ObjectID \n\t\tStartTime (in MM/DD/YYYY HH:MM:SS Format) \n\t\tInclude Score Decrease \n\t\tAudits Table Name \n\t\tDetections Table Name \n\t\tEntity Scoring Table Name \n\t\tLockdown Table Name \n\t\tHealth Table Name \n\t\tEntities Table Name \n\t\tExclude Group Details From Detections\n\t\tLog Level 
(Default: INFO) \n\t\tLockdown Schedule \n\t\tHealth Schedule \n\t\tDetections Schedule \n\t\tAudits Schedule \n\t\tEntity Scoring Schedule \n\t\tEntities Schedule \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Vectra data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-VectraXDR320-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. VECTRAXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. 
Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tVectra Base URL (https://) \n\t\tVectra Client Id - Health \n\t\tVectra Client Secret Key - Health \n\t\tVectra Client Id - Entity Scoring \n\t\tVectra Client Secret - Entity Scoring \n\t\tVectra Client Id - Detections \n\t\tVectra Client Secret - Detections \n\t\tVectra Client Id - Audits \n\t\tVectra Client Secret - Audits \n\t\tVectra Client Id - Lockdown \n\t\tVectra Client Secret - Lockdown \n\t\tVectra Client Id - Host-Entity \n\t\tVectra Client Secret - Host-Entity \n\t\tVectra Client Id - Account-Entity \n\t\tVectra Client Secret - Account-Entity \n\t\tKey Vault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tStartTime (in MM/DD/YYYY HH:MM:SS Format) \n\t\tInclude Score Decrease \n\t\tAudits Table Name \n\t\tDetections Table Name \n\t\tEntity Scoring Table Name \n\t\tLockdown Table Name \n\t\tHealth Table Name \n\t\tEntities Table Name \n\t\tLog Level (Default: INFO) \n\t\tLockdown Schedule \n\t\tHealth Schedule \n\t\tDetections Schedule \n\t\tAudits Schedule \n\t\tEntity Scoring Schedule \n\t\tEntities Schedule \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. 
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Vectra Client ID** and **Client Secret** is required for Health, Entity Scoring, Entities, Detections, Lockdown and Audit data collection. 
See the documentation to learn more about API on the `https://support.vectra.ai/s/article/KB-VS-1666`.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR/Data%20Connectors/VectraDataConnector/VectraXDR_API_FunctionApp.json","true" -"","Veeam","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam","veeamsoftware","azure-sentinel-solution-veeamapp","2025-08-26","","3.0.2","Veeam Software","Partner","https://helpcenter.veeam.com/docs/security_plugins_microsoft_sentinel/guide/","","domains","","","","","","","false","","false" -"VeeamAuthorizationEvents_CL","Veeam","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam","veeamsoftware","azure-sentinel-solution-veeamapp","2025-08-26","","3.0.2","Veeam Software","Partner","https://helpcenter.veeam.com/docs/security_plugins_microsoft_sentinel/guide/","","domains","VeeamCustomTablesDataConnector","Veeam","Veeam Data Connector (using Azure Functions)","Veeam Data Connector allows you to ingest Veeam telemetry data from multiple custom tables into Microsoft Sentinel.

The connector supports integration with Veeam Backup & Replication, Veeam ONE and Coveware platforms to provide comprehensive monitoring and security analytics. The data is collected through Azure Functions and stored in custom Log Analytics tables with dedicated Data Collection Rules (DCR) and Data Collection Endpoints (DCE).

**Custom Tables Included:**
- **VeeamMalwareEvents_CL**: Malware detection events from Veeam Backup & Replication
- **VeeamSecurityComplianceAnalyzer_CL**: Security & Compliance Analyzer results collected from Veeam backup infrastructure components
- **VeeamAuthorizationEvents_CL**: Authorization and authentication events
- **VeeamOneTriggeredAlarms_CL**: Triggered alarms from Veeam ONE servers
- **VeeamCovewareFindings_CL**: Security findings from Coveware solution
- **VeeamSessions_CL**: Veeam sessions","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Veeam APIs and pull data into Microsoft Sentinel custom tables. This may result in additional data ingestion costs. See the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Select the deployment option for Veeam Data Connector and associated Azure Functions**\n\n>**IMPORTANT:** Before you deploy Veeam Data Connector, prepare Workspace Name (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Veeam data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVeeam%2FData%2520Connectors%2Fazuredeploy_Veeam_API_FunctionApp.json)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Microsoft Sentinel Workspace Name**. \n4. Click **Review + Create**, **Create**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Veeam Infrastructure Access"", ""description"": ""Access to Veeam Backup & Replication REST API and Veeam ONE monitoring platform is required. This includes proper authentication credentials and network connectivity.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam/Data%20Connectors/Veeam_API_FunctionApp.json","true" -"VeeamCovewareFindings_CL","Veeam","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam","veeamsoftware","azure-sentinel-solution-veeamapp","2025-08-26","","3.0.2","Veeam Software","Partner","https://helpcenter.veeam.com/docs/security_plugins_microsoft_sentinel/guide/","","domains","VeeamCustomTablesDataConnector","Veeam","Veeam Data Connector (using Azure Functions)","Veeam Data Connector allows you to ingest Veeam telemetry data from multiple custom tables into Microsoft Sentinel.

The connector supports integration with Veeam Backup & Replication, Veeam ONE and Coveware platforms to provide comprehensive monitoring and security analytics. The data is collected through Azure Functions and stored in custom Log Analytics tables with dedicated Data Collection Rules (DCR) and Data Collection Endpoints (DCE).

**Custom Tables Included:**
- **VeeamMalwareEvents_CL**: Malware detection events from Veeam Backup & Replication
- **VeeamSecurityComplianceAnalyzer_CL**: Security & Compliance Analyzer results collected from Veeam backup infrastructure components
- **VeeamAuthorizationEvents_CL**: Authorization and authentication events
- **VeeamOneTriggeredAlarms_CL**: Triggered alarms from Veeam ONE servers
- **VeeamCovewareFindings_CL**: Security findings from Coveware solution
- **VeeamSessions_CL**: Veeam sessions","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Veeam APIs and pull data into Microsoft Sentinel custom tables. This may result in additional data ingestion costs. See the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Select the deployment option for Veeam Data Connector and associated Azure Functions**\n\n>**IMPORTANT:** Before you deploy Veeam Data Connector, prepare Workspace Name (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Veeam data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVeeam%2FData%2520Connectors%2Fazuredeploy_Veeam_API_FunctionApp.json)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Microsoft Sentinel Workspace Name**. \n4. Click **Review + Create**, **Create**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Veeam Infrastructure Access"", ""description"": ""Access to Veeam Backup & Replication REST API and Veeam ONE monitoring platform is required. This includes proper authentication credentials and network connectivity.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam/Data%20Connectors/Veeam_API_FunctionApp.json","true" -"VeeamMalwareEvents_CL","Veeam","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam","veeamsoftware","azure-sentinel-solution-veeamapp","2025-08-26","","3.0.2","Veeam Software","Partner","https://helpcenter.veeam.com/docs/security_plugins_microsoft_sentinel/guide/","","domains","VeeamCustomTablesDataConnector","Veeam","Veeam Data Connector (using Azure Functions)","Veeam Data Connector allows you to ingest Veeam telemetry data from multiple custom tables into Microsoft Sentinel.

The connector supports integration with Veeam Backup & Replication, Veeam ONE and Coveware platforms to provide comprehensive monitoring and security analytics. The data is collected through Azure Functions and stored in custom Log Analytics tables with dedicated Data Collection Rules (DCR) and Data Collection Endpoints (DCE).

**Custom Tables Included:**
- **VeeamMalwareEvents_CL**: Malware detection events from Veeam Backup & Replication
- **VeeamSecurityComplianceAnalyzer_CL**: Security & Compliance Analyzer results collected from Veeam backup infrastructure components
- **VeeamAuthorizationEvents_CL**: Authorization and authentication events
- **VeeamOneTriggeredAlarms_CL**: Triggered alarms from Veeam ONE servers
- **VeeamCovewareFindings_CL**: Security findings from Coveware solution
- **VeeamSessions_CL**: Veeam sessions","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Veeam APIs and pull data into Microsoft Sentinel custom tables. This may result in additional data ingestion costs. See the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Select the deployment option for Veeam Data Connector and associated Azure Functions**\n\n>**IMPORTANT:** Before you deploy Veeam Data Connector, prepare Workspace Name (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Veeam data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVeeam%2FData%2520Connectors%2Fazuredeploy_Veeam_API_FunctionApp.json)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Microsoft Sentinel Workspace Name**. \n4. Click **Review + Create**, **Create**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Veeam Infrastructure Access"", ""description"": ""Access to Veeam Backup & Replication REST API and Veeam ONE monitoring platform is required. This includes proper authentication credentials and network connectivity.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam/Data%20Connectors/Veeam_API_FunctionApp.json","true" -"VeeamOneTriggeredAlarms_CL","Veeam","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam","veeamsoftware","azure-sentinel-solution-veeamapp","2025-08-26","","3.0.2","Veeam Software","Partner","https://helpcenter.veeam.com/docs/security_plugins_microsoft_sentinel/guide/","","domains","VeeamCustomTablesDataConnector","Veeam","Veeam Data Connector (using Azure Functions)","Veeam Data Connector allows you to ingest Veeam telemetry data from multiple custom tables into Microsoft Sentinel.

The connector supports integration with Veeam Backup & Replication, Veeam ONE and Coveware platforms to provide comprehensive monitoring and security analytics. The data is collected through Azure Functions and stored in custom Log Analytics tables with dedicated Data Collection Rules (DCR) and Data Collection Endpoints (DCE).

**Custom Tables Included:**
- **VeeamMalwareEvents_CL**: Malware detection events from Veeam Backup & Replication
- **VeeamSecurityComplianceAnalyzer_CL**: Security & Compliance Analyzer results collected from Veeam backup infrastructure components
- **VeeamAuthorizationEvents_CL**: Authorization and authentication events
- **VeeamOneTriggeredAlarms_CL**: Triggered alarms from Veeam ONE servers
- **VeeamCovewareFindings_CL**: Security findings from Coveware solution
- **VeeamSessions_CL**: Veeam sessions","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Veeam APIs and pull data into Microsoft Sentinel custom tables. This may result in additional data ingestion costs. See the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Select the deployment option for Veeam Data Connector and associated Azure Functions**\n\n>**IMPORTANT:** Before you deploy Veeam Data Connector, prepare Workspace Name (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Veeam data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVeeam%2FData%2520Connectors%2Fazuredeploy_Veeam_API_FunctionApp.json)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Microsoft Sentinel Workspace Name**. \n4. Click **Review + Create**, **Create**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Veeam Infrastructure Access"", ""description"": ""Access to Veeam Backup & Replication REST API and Veeam ONE monitoring platform is required. This includes proper authentication credentials and network connectivity.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam/Data%20Connectors/Veeam_API_FunctionApp.json","true" -"VeeamSecurityComplianceAnalyzer_CL","Veeam","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam","veeamsoftware","azure-sentinel-solution-veeamapp","2025-08-26","","3.0.2","Veeam Software","Partner","https://helpcenter.veeam.com/docs/security_plugins_microsoft_sentinel/guide/","","domains","VeeamCustomTablesDataConnector","Veeam","Veeam Data Connector (using Azure Functions)","Veeam Data Connector allows you to ingest Veeam telemetry data from multiple custom tables into Microsoft Sentinel.

The connector supports integration with Veeam Backup & Replication, Veeam ONE and Coveware platforms to provide comprehensive monitoring and security analytics. The data is collected through Azure Functions and stored in custom Log Analytics tables with dedicated Data Collection Rules (DCR) and Data Collection Endpoints (DCE).

**Custom Tables Included:**
- **VeeamMalwareEvents_CL**: Malware detection events from Veeam Backup & Replication
- **VeeamSecurityComplianceAnalyzer_CL**: Security & Compliance Analyzer results collected from Veeam backup infrastructure components
- **VeeamAuthorizationEvents_CL**: Authorization and authentication events
- **VeeamOneTriggeredAlarms_CL**: Triggered alarms from Veeam ONE servers
- **VeeamCovewareFindings_CL**: Security findings from Coveware solution
- **VeeamSessions_CL**: Veeam sessions","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Veeam APIs and pull data into Microsoft Sentinel custom tables. This may result in additional data ingestion costs. See the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Select the deployment option for Veeam Data Connector and associated Azure Functions**\n\n>**IMPORTANT:** Before you deploy Veeam Data Connector, prepare Workspace Name (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Veeam data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVeeam%2FData%2520Connectors%2Fazuredeploy_Veeam_API_FunctionApp.json)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Microsoft Sentinel Workspace Name**. \n4. Click **Review + Create**, **Create**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Veeam Infrastructure Access"", ""description"": ""Access to Veeam Backup & Replication REST API and Veeam ONE monitoring platform is required. This includes proper authentication credentials and network connectivity.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam/Data%20Connectors/Veeam_API_FunctionApp.json","true" -"VeeamSessions_CL","Veeam","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam","veeamsoftware","azure-sentinel-solution-veeamapp","2025-08-26","","3.0.2","Veeam Software","Partner","https://helpcenter.veeam.com/docs/security_plugins_microsoft_sentinel/guide/","","domains","VeeamCustomTablesDataConnector","Veeam","Veeam Data Connector (using Azure Functions)","Veeam Data Connector allows you to ingest Veeam telemetry data from multiple custom tables into Microsoft Sentinel.

The connector supports integration with Veeam Backup & Replication, Veeam ONE and Coveware platforms to provide comprehensive monitoring and security analytics. The data is collected through Azure Functions and stored in custom Log Analytics tables with dedicated Data Collection Rules (DCR) and Data Collection Endpoints (DCE).

**Custom Tables Included:**
- **VeeamMalwareEvents_CL**: Malware detection events from Veeam Backup & Replication
- **VeeamSecurityComplianceAnalyzer_CL**: Security & Compliance Analyzer results collected from Veeam backup infrastructure components
- **VeeamAuthorizationEvents_CL**: Authorization and authentication events
- **VeeamOneTriggeredAlarms_CL**: Triggered alarms from Veeam ONE servers
- **VeeamCovewareFindings_CL**: Security findings from Coveware solution
- **VeeamSessions_CL**: Veeam sessions","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Veeam APIs and pull data into Microsoft Sentinel custom tables. This may result in additional data ingestion costs. See the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Select the deployment option for Veeam Data Connector and associated Azure Functions**\n\n>**IMPORTANT:** Before you deploy Veeam Data Connector, prepare Workspace Name (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Veeam data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVeeam%2FData%2520Connectors%2Fazuredeploy_Veeam_API_FunctionApp.json)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Microsoft Sentinel Workspace Name**. \n4. Click **Review + Create**, **Create**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Veeam Infrastructure Access"", ""description"": ""Access to Veeam Backup & Replication REST API and Veeam ONE monitoring platform is required. This includes proper authentication credentials and network connectivity.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam/Data%20Connectors/Veeam_API_FunctionApp.json","true" -"","Veritas NetBackup","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veritas%20NetBackup","veritas","veritas-sentinel","2023-09-25","","","Veritas Technologies LLC","Partner","https://www.veritas.com/content/support/en_US/contact-us","","domains","","","","","","","false","","false" -"","VirtualMetric DataStream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VirtualMetric%20DataStream","virtualmetric","azure-sentinel-solution-virtualmetric-datastream","2025-09-15","","","VirtualMetric","Partner","https://support.virtualmetric.com","VirtualMetric","domains","","","","","","","false","","false" -"CommonSecurityLog","VirtualMetric DataStream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VirtualMetric%20DataStream","virtualmetric","azure-sentinel-solution-virtualmetric-datastream","2025-09-15","","","VirtualMetric","Partner","https://support.virtualmetric.com","VirtualMetric","domains","VirtualMetricDirectorProxy","VirtualMetric","VirtualMetric Director Proxy","VirtualMetric Director Proxy deploys an Azure Function 
App to securely bridge VirtualMetric DataStream with Azure services including Microsoft Sentinel, Azure Data Explorer, and Azure Storage.","[{""title"": ""Deploy VirtualMetric Director Proxy"", ""description"": ""Deploy the Azure Function App that serves as a secure proxy between VirtualMetric DataStream and Microsoft Sentinel."", ""instructions"": [{""type"": ""InstructionStepsGroup"", ""parameters"": {""enable"": true, ""instructionSteps"": [{""title"": ""Prerequisites and Deployment Order"", ""description"": ""**Recommended Deployment Order:**\n\nFor optimal configuration, consider deploying the target connectors first:\n\n1. **Deploy Microsoft Sentinel Connector**: Deploy the VirtualMetric DataStream for Microsoft Sentinel connector first to create the required Data Collection Endpoints and Rules.\n\n2. **Deploy Microsoft Sentinel data lake Connector** (optional): If using Microsoft Sentinel data lake tables, deploy the VirtualMetric DataStream for Microsoft Sentinel data lake connector.\n\n3. **Deploy Director Proxy** (this step): The Director Proxy can then be configured with your Microsoft Sentinel targets.\n\n**Note:** This order is recommended but not required. You can deploy the Director Proxy independently and configure it with your targets later.""}, {""title"": ""Deploy Azure Function App"", ""description"": ""Deploy the VirtualMetric Director Proxy Azure Function App using the Deploy to Azure button.\n\n1. **Deploy to Azure**:\n - Click the Deploy to Azure button below to deploy the Function App:\n - [![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVirtualMetric%2520DataStream%2FData%2520Connectors%2FVirtualMetric-DirectorProxy%2FDeployToAzure.json)\n\n2. 
**Configure Deployment Parameters**:\n - **Subscription**: Select your Azure subscription\n - **Resource Group**: Choose the same resource group as your Microsoft Sentinel workspace or create a new one\n - **Region**: Select the Azure region (should match your Microsoft Sentinel workspace region)\n - **Function App Name**: Provide a unique name for the Function App (e.g., \""vmetric-director-proxy\"")\n\n3. **Complete Deployment**:\n - Click **Review + create** to validate the parameters\n - Click **Create** to deploy the Function App\n - Wait for deployment to complete (typically 3-5 minutes)\n - Note the Function App URL: `https://.azurewebsites.net`""}, {""title"": ""Configure Function App Permissions"", ""description"": ""Assign the necessary permissions to the Function App's managed identity to access Microsoft Sentinel resources.\n\n1. **Enable System-Assigned Managed Identity**:\n - Navigate to your deployed Function App in Azure Portal\n - Go to **Identity** under Settings\n - Toggle **Status** to **On** for System assigned identity\n - Click **Save** and confirm\n\n2. **Navigate to Resource Group**:\n - Go to the resource group containing your Microsoft Sentinel workspace and Data Collection Endpoints\n\n3. **Assign Required Roles**:\n - Open **Access control (IAM)**\n - Click **+ Add** > **Add role assignment**\n - Assign the following roles to the Function App's system-assigned managed identity:\n - **Monitoring Metrics Publisher**: For sending data to Data Collection Endpoints\n - **Monitoring Reader**: For reading Data Collection Rules configuration\n\n4. **Select the Function App Identity**:\n - In **Members** tab, select **Managed identity**\n - Choose **Function App** and select your deployed Director Proxy Function App\n - Complete the role assignment\n\n5. 
**Get Function App Access Token** (Optional for Function Key authentication):\n - Navigate to your Function App\n - Go to **App keys** under Functions\n - Copy the default host key or create a new function key for authentication""}, {""title"": ""Configure VirtualMetric DataStream Integration"", ""description"": ""Set up VirtualMetric DataStream to send security telemetry to Microsoft Sentinel through the Director Proxy.\n\n1. **Access VirtualMetric DataStream Configuration**:\n - Log into your **VirtualMetric DataStream** management console\n - Navigate to **Targets** section\n - Click **Microsoft Sentinel Targets**\n - Click **Add new target** or edit an existing Microsoft Sentinel target\n\n2. **Configure General Settings**:\n - **Name**: Enter a name for your target (e.g., \""sentinel-with-proxy\"")\n - **Description**: Optionally provide a description for the target configuration\n\n3. **Configure Azure Authentication**:\n \n **For Service Principal Authentication:**\n - **Managed Identity for Azure**: Keep **Disabled**\n - **Tenant ID**: Enter your Azure Active Directory tenant ID\n - **Client ID**: Enter your service principal application ID\n - **Client Secret**: Enter your service principal client secret\n \n **For Azure Managed Identity:**\n - **Managed Identity for Azure**: Set to **Enabled**\n\n4. **Configure Director Proxy** (in Azure Properties tab):\n - **Endpoint Address**: Enter the Function App URL from Step 2 (format: `https://.azurewebsites.net`)\n - **Access Token**: Enter the Function App host key from Step 3 (optional if using Managed Identity)\n\n5. **Configure Stream Properties**:\n - **Endpoint**: Enter the DCE Logs Ingestion URI (format: `https://..ingest.monitor.azure.com`)\n - **Streams**: Select **Auto** for automatic stream detection, or configure specific streams if needed\n\n6. 
**Verify Data Ingestion in Microsoft Sentinel**:\n - Return to your **Log Analytics Workspace**\n - Run sample queries to confirm data is being received:\n ```kql\n CommonSecurityLog\n | where TimeGenerated > ago(1h)\n | take 10\n ```\n - Check the **Microsoft Sentinel Overview** dashboard for new data sources and event counts""}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": false}}], ""customs"": [{""name"": ""Azure Function App"", ""description"": ""An Azure Function App must be deployed to host the Director Proxy. Requires read, write, and delete permissions on Microsoft.Web/sites resources within your resource group to create and manage the Function App.""}, {""name"": ""VirtualMetric DataStream Configuration"", ""description"": ""You need VirtualMetric DataStream configured with authentication credentials to connect to the Director Proxy. 
The Director Proxy acts as a secure bridge between VirtualMetric DataStream and Azure services.""}, {""name"": ""Target Azure Services"", ""description"": ""Configure your target Azure services such as Microsoft Sentinel Data Collection Endpoints, Azure Data Explorer clusters, or Azure Storage accounts where the Director Proxy will forward data.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VirtualMetric%20DataStream/Data%20Connectors/VirtualMetric-DirectorProxy/Template_DirectorProxy.json","true" -"CommonSecurityLog","VirtualMetric DataStream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VirtualMetric%20DataStream","virtualmetric","azure-sentinel-solution-virtualmetric-datastream","2025-09-15","","","VirtualMetric","Partner","https://support.virtualmetric.com","VirtualMetric","domains","VirtualMetricMSSentinelConnector","VirtualMetric","VirtualMetric DataStream for Microsoft Sentinel","VirtualMetric DataStream connector deploys Data Collection Rules to ingest security telemetry into Microsoft Sentinel.","[{""title"": ""Configure VirtualMetric DataStream for Microsoft Sentinel"", ""description"": ""Configure the VirtualMetric DataStream for Microsoft Sentinel to send data."", ""instructions"": [{""type"": ""InstructionStepsGroup"", ""parameters"": {""enable"": true, ""instructionSteps"": [{""title"": ""Register Application in Microsoft Entra ID (Optional)"", ""description"": ""**Choose your authentication method:**\n\n**Option A: Use Azure Managed Identity (Recommended)**\n- Skip this step if you plan to use Azure Managed Identity for authentication.\n- Azure Managed Identity provides a more secure authentication method without managing credentials.\n\n**Option B: Register a Service Principal Application**\n\n1. 
**Open the [Microsoft Entra ID page](https://entra.microsoft.com/)**:\n - Click the provided link to open the **Microsoft Entra ID** registration page in a new tab.\n - Ensure you are logged in with an account that has **Application Administrator** or **Global Administrator** permissions.\n\n2. **Create a New Application**:\n - In the **Microsoft Entra ID portal**, select **App registrations** from the left-hand navigation.\n - Click on **+ New registration**.\n - Fill out the following fields:\n - **Name**: Enter a descriptive name for the app (e.g., \""VirtualMetric ASIM Connector\"").\n - **Supported account types**: Choose **Accounts in this organizational directory only** (Single tenant).\n - **Redirect URI**: Leave this blank.\n - Click **Register** to create the application.\n\n3. **Copy Application and Tenant IDs**:\n - Once the app is registered, note the **Application (client) ID** and **Directory (tenant) ID** from the **Overview** page. You'll need these for VirtualMetric DataStream configuration.\n\n4. **Create a Client Secret**:\n - In the **Certificates & secrets** section, click **+ New client secret**.\n - Add a description (e.g., 'VirtualMetric ASIM Secret') and set an appropriate expiration period.\n - Click **Add**.\n - **Copy the client secret value immediately**, as it will not be shown again. Store this securely for VirtualMetric DataStream configuration.""}, {""title"": ""Assign Required Permissions"", ""description"": ""Assign the required roles to your chosen authentication method (Service Principal or Managed Identity) in the resource group.\n\n**For Service Principal (if you completed Step 1):**\n\n1. **Navigate to Your Resource Group**:\n - Open the **Azure Portal** and navigate to the **Resource Group** that contains your **Log Analytics Workspace** and where **Data Collection Rules (DCRs)** will be deployed.\n\n2. 
**Assign the Monitoring Metrics Publisher Role**:\n - In the **Resource Group**, click on **Access control (IAM)** from the left-hand menu.\n - Click **+ Add** and select **Add role assignment**.\n - In the **Role** tab, search for and select **Monitoring Metrics Publisher**.\n - Click **Next** to go to the **Members** tab.\n - Under **Assign access to**, select **User, group, or service principal**.\n - Click **+ Select members** and search for your registered application by name or client ID.\n - Select your application and click **Select**.\n - Click **Review + assign** twice to complete the assignment.\n\n3. **Assign the Monitoring Reader Role**:\n - Repeat the same process to assign the **Monitoring Reader** role:\n - Click **+ Add** and select **Add role assignment**.\n - In the **Role** tab, search for and select **Monitoring Reader**.\n - Follow the same member selection process as above.\n - Click **Review + assign** twice to complete the assignment.\n\n**For Azure Managed Identity:**\n\n1. **Create or Identify Your Managed Identity**:\n - If using **System-assigned Managed Identity**: Enable it on your Azure resource (VM, App Service, etc.).\n - If using **User-assigned Managed Identity**: Create one in your resource group if it doesn't exist.\n\n2. **Assign the Monitoring Metrics Publisher Role**:\n - Follow the same steps as above, but in the **Members** tab:\n - Under **Assign access to**, select **Managed identity**.\n - Click **+ Select members** and choose the appropriate managed identity type and select your identity.\n - Click **Select**, then **Review + assign** twice to complete.\n\n3. 
**Assign the Monitoring Reader Role**:\n - Repeat the process to assign the **Monitoring Reader** role to the same managed identity.\n\n**Required Permission Summary:**\nThe assigned roles provide the following capabilities:\n- **Monitoring Metrics Publisher**: Write data to Data Collection Endpoints (DCE) and send telemetry through Data Collection Rules (DCR)\n- **Monitoring Reader**: Read stream configuration and access Log Analytics workspace for ASIM table ingestion""}, {""title"": ""Deploy Azure Infrastructure"", ""description"": ""Deploy the required Data Collection Endpoint (DCE) and Data Collection Rules (DCR) for Microsoft Sentinel tables using our ARM template.\n\n1. **Deploy to Azure**:\n - Click the Deploy to Azure button below to automatically deploy the required infrastructure:\n - [![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVirtualMetric%2520DataStream%2FData%2520Connectors%2FVirtualMetric-Sentinel%2FDeployToAzure.json)\n - This will take you directly to the Azure portal to start the deployment.\n\n2. **Configure Deployment Parameters**:\n - On the custom deployment page, configure the following settings:\n \n **Project details:**\n - **Subscription**: Select your Azure subscription from the dropdown\n - **Resource group**: Select an existing resource group or click **Create new** to create a new one\n \n **Instance details:**\n - **Region**: Select the Azure region where your Log Analytics workspace is located (e.g., West Europe)\n - **Workspace**: Enter your Log Analytics workspace name\n - **DCE Name**: Provide a name for the Data Collection Endpoint (e.g., \""vmetric-dce\"")\n - **DCR Name Prefix**: Provide a prefix for the Data Collection Rules (e.g., \""vmetric-dcr\"")\n\n3. 
**Complete the Deployment**:\n - Click **Review + create** to validate the template.\n - Review the parameters and click **Create** to deploy the resources.\n - Wait for the deployment to complete (typically takes 2-5 minutes).\n\n4. **Verify Deployed Resources**:\n - After deployment, verify the following resources were created:\n - **Data Collection Endpoint (DCE)**: Check **Azure Portal > Monitor > Data Collection Endpoints**\n - **Data Collection Rules (DCRs)**: Check **Azure Portal > Monitor > Data Collection Rules**\n - **Copy the DCE Logs Ingestion URI** from the DCE **Overview** page (format: `https://..ingest.monitor.azure.com`)\n - **Copy the DCE Resource ID** from the DCE **Overview** page (format: `/subscriptions//resourceGroups//providers/Microsoft.Insights/dataCollectionEndpoints/`)\n - For each DCR, note the **Immutable ID** from the **Overview** page - you'll need these for VirtualMetric DataStream configuration.""}, {""title"": ""Configure VirtualMetric DataStream Integration"", ""description"": ""Set up VirtualMetric DataStream to send security telemetry to Microsoft Sentinel tables.\n\n1. **Access VirtualMetric DataStream Configuration**:\n - Log into your **VirtualMetric DataStream** management console.\n - Navigate to **Fleet Management** > **Targets** section.\n - Click **Add new target** button.\n - Select **Microsoft Sentinel** target.\n\n2. **Configure General Settings**:\n - **Name**: Enter a name for your target (e.g., \""cus01-ms-sentinel\"")\n - **Description**: Optionally provide a description for the target configuration\n\n3. 
**Configure Azure Authentication** (choose based on Step 1):\n \n **For Service Principal Authentication:**\n - **Managed Identity for Azure**: Keep **Disabled**\n - **Tenant ID**: Enter the Directory (tenant) ID from Step 1\n - **Client ID**: Enter the Application (client) ID from Step 1\n - **Client Secret**: Enter the client secret value from Step 1\n \n **For Azure Managed Identity:**\n - **Managed Identity for Azure**: Set to **Enabled**\n\n4. **Configure Stream Properties**:\n - **Endpoint**: Choose your configuration method:\n - **For manual stream configuration**: Enter the DCE Logs Ingestion URI (format: `https://..ingest.monitor.azure.com`)\n - **For auto stream detection**: Enter the DCE Resource ID (format: `/subscriptions//resourceGroups//providers/Microsoft.Insights/dataCollectionEndpoints/`)\n - **Streams**: Select **Auto** for automatic stream detection, or configure specific streams if needed\n\n5. **Verify Data Ingestion in Microsoft Sentinel**:\n - Return to your **Log Analytics Workspace**\n - Run sample queries on the ASIM tables to confirm data is being received:\n ```kql\n ASimNetworkSessionLogs\n | where TimeGenerated > ago(1h)\n | take 10\n ```\n - Check the **Microsoft Sentinel Overview** dashboard for new data sources and event counts.""}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": false}}], ""customs"": [{""name"": ""App Registration or Azure Managed Identity"", ""description"": ""VirtualMetric DataStream 
requires an Entra ID identity to authenticate and send logs to Microsoft Sentinel. You can choose between creating an App Registration with Client ID and Client Secret, or using Azure Managed Identity for enhanced security without credential management.""}, {""name"": ""Resource Group Role Assignment"", ""description"": ""The chosen identity (App Registration or Managed Identity) must be assigned to the resource group containing the Data Collection Endpoint with the following roles: Monitoring Metrics Publisher (for log ingestion) and Monitoring Reader (for reading stream configuration).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VirtualMetric%20DataStream/Data%20Connectors/VirtualMetric-Sentinel/Template_Sentinel.json","true" -"CommonSecurityLog","VirtualMetric DataStream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VirtualMetric%20DataStream","virtualmetric","azure-sentinel-solution-virtualmetric-datastream","2025-09-15","","","VirtualMetric","Partner","https://support.virtualmetric.com","VirtualMetric","domains","VirtualMetricMSSentinelDataLakeConnector","VirtualMetric","VirtualMetric DataStream for Microsoft Sentinel data lake","VirtualMetric DataStream connector deploys Data Collection Rules to ingest security telemetry into Microsoft Sentinel data lake.","[{""title"": ""Configure VirtualMetric DataStream for Microsoft Sentinel data lake"", ""description"": ""Configure the VirtualMetric DataStream for Microsoft Sentinel data lake to send data."", ""instructions"": [{""type"": ""InstructionStepsGroup"", ""parameters"": {""enable"": true, ""instructionSteps"": [{""title"": ""Register Application in Microsoft Entra ID (Optional)"", ""description"": ""**Choose your authentication method:**\n\n**Option A: Use Azure Managed Identity (Recommended)**\n- Skip this step if you plan to use Azure Managed Identity for authentication.\n- Azure Managed Identity provides a more secure authentication method without managing 
credentials.\n\n**Option B: Register a Service Principal Application**\n\n1. **Open the [Microsoft Entra ID page](https://entra.microsoft.com/)**:\n - Click the provided link to open the **Microsoft Entra ID** registration page in a new tab.\n - Ensure you are logged in with an account that has **Application Administrator** or **Global Administrator** permissions.\n\n2. **Create a New Application**:\n - In the **Microsoft Entra ID portal**, select **App registrations** from the left-hand navigation.\n - Click on **+ New registration**.\n - Fill out the following fields:\n - **Name**: Enter a descriptive name for the app (e.g., \""VirtualMetric ASIM Connector\"").\n - **Supported account types**: Choose **Accounts in this organizational directory only** (Single tenant).\n - **Redirect URI**: Leave this blank.\n - Click **Register** to create the application.\n\n3. **Copy Application and Tenant IDs**:\n - Once the app is registered, note the **Application (client) ID** and **Directory (tenant) ID** from the **Overview** page. You'll need these for VirtualMetric DataStream configuration.\n\n4. **Create a Client Secret**:\n - In the **Certificates & secrets** section, click **+ New client secret**.\n - Add a description (e.g., 'VirtualMetric ASIM Secret') and set an appropriate expiration period.\n - Click **Add**.\n - **Copy the client secret value immediately**, as it will not be shown again. Store this securely for VirtualMetric DataStream configuration.""}, {""title"": ""Assign Required Permissions"", ""description"": ""Assign the required roles to your chosen authentication method (Service Principal or Managed Identity) in the resource group.\n\n**For Service Principal (if you completed Step 1):**\n\n1. **Navigate to Your Resource Group**:\n - Open the **Azure Portal** and navigate to the **Resource Group** that contains your **Log Analytics Workspace** and where **Data Collection Rules (DCRs)** will be deployed.\n\n2. 
**Assign the Monitoring Metrics Publisher Role**:\n - In the **Resource Group**, click on **Access control (IAM)** from the left-hand menu.\n - Click **+ Add** and select **Add role assignment**.\n - In the **Role** tab, search for and select **Monitoring Metrics Publisher**.\n - Click **Next** to go to the **Members** tab.\n - Under **Assign access to**, select **User, group, or service principal**.\n - Click **+ Select members** and search for your registered application by name or client ID.\n - Select your application and click **Select**.\n - Click **Review + assign** twice to complete the assignment.\n\n3. **Assign the Monitoring Reader Role**:\n - Repeat the same process to assign the **Monitoring Reader** role:\n - Click **+ Add** and select **Add role assignment**.\n - In the **Role** tab, search for and select **Monitoring Reader**.\n - Follow the same member selection process as above.\n - Click **Review + assign** twice to complete the assignment.\n\n**For Azure Managed Identity:**\n\n1. **Create or Identify Your Managed Identity**:\n - If using **System-assigned Managed Identity**: Enable it on your Azure resource (VM, App Service, etc.).\n - If using **User-assigned Managed Identity**: Create one in your resource group if it doesn't exist.\n\n2. **Assign the Monitoring Metrics Publisher Role**:\n - Follow the same steps as above, but in the **Members** tab:\n - Under **Assign access to**, select **Managed identity**.\n - Click **+ Select members** and choose the appropriate managed identity type and select your identity.\n - Click **Select**, then **Review + assign** twice to complete.\n\n3. 
**Assign the Monitoring Reader Role**:\n - Repeat the process to assign the **Monitoring Reader** role to the same managed identity.\n\n**Required Permission Summary:**\nThe assigned roles provide the following capabilities:\n- **Monitoring Metrics Publisher**: Write data to Data Collection Endpoints (DCE) and send telemetry through Data Collection Rules (DCR)\n- **Monitoring Reader**: Read stream configuration and access Log Analytics workspace for ASIM table ingestion""}, {""title"": ""Deploy Azure Infrastructure"", ""description"": ""Deploy the required Data Collection Endpoint (DCE) and Data Collection Rules (DCR) for Microsoft Sentinel data lake tables using our ARM template.\n\n1. **Deploy to Azure**:\n - Click the Deploy to Azure button below to automatically deploy the required infrastructure:\n - [![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVirtualMetric%2520DataStream%2FData%2520Connectors%2FVirtualMetric-SentinelDataLake%2FDeployToAzure.json)\n - This will take you directly to the Azure portal to start the deployment.\n\n2. **Configure Deployment Parameters**:\n - On the custom deployment page, configure the following settings:\n \n **Project details:**\n - **Subscription**: Select your Azure subscription from the dropdown\n - **Resource group**: Select an existing resource group or click **Create new** to create a new one\n \n **Instance details:**\n - **Region**: Select the Azure region where your Log Analytics workspace is located (e.g., West Europe)\n - **Workspace**: Enter your Log Analytics workspace name\n - **DCE Name**: Provide a name for the Data Collection Endpoint (e.g., \""vmetric-dce\"")\n - **DCR Name Prefix**: Provide a prefix for the Data Collection Rules (e.g., \""vmetric-dcr\"")\n\n3. 
**Complete the Deployment**:\n - Click **Review + create** to validate the template.\n - Review the parameters and click **Create** to deploy the resources.\n - Wait for the deployment to complete (typically takes 2-5 minutes).\n\n4. **Verify Deployed Resources**:\n - After deployment, verify the following resources were created:\n - **Data Collection Endpoint (DCE)**: Check **Azure Portal > Monitor > Data Collection Endpoints**\n - **Data Collection Rules (DCRs)**: Check **Azure Portal > Monitor > Data Collection Rules**\n - **Copy the DCE Logs Ingestion URI** from the DCE **Overview** page (format: `https://..ingest.monitor.azure.com`)\n - **Copy the DCE Resource ID** from the DCE **Overview** page (format: `/subscriptions//resourceGroups//providers/Microsoft.Insights/dataCollectionEndpoints/`)\n - For each DCR, note the **Immutable ID** from the **Overview** page - you'll need these for VirtualMetric DataStream configuration.""}, {""title"": ""Configure VirtualMetric DataStream Integration"", ""description"": ""Set up VirtualMetric DataStream to send security telemetry to Microsoft Sentinel data lake tables.\n\n1. **Access VirtualMetric DataStream Configuration**:\n - Log into your **VirtualMetric DataStream** management console.\n - Navigate to **Fleet Management** > **Targets** section.\n - Click **Add new target** button.\n - Select **Microsoft Sentinel** target.\n\n2. **Configure General Settings**:\n - **Name**: Enter a name for your target (e.g., \""cus01-ms-sentinel\"")\n - **Description**: Optionally provide a description for the target configuration\n\n3. 
**Configure Azure Authentication** (choose based on Step 1):\n \n **For Service Principal Authentication:**\n - **Managed Identity for Azure**: Keep **Disabled**\n - **Tenant ID**: Enter the Directory (tenant) ID from Step 1\n - **Client ID**: Enter the Application (client) ID from Step 1\n - **Client Secret**: Enter the client secret value from Step 1\n \n **For Azure Managed Identity:**\n - **Managed Identity for Azure**: Set to **Enabled**\n\n4. **Configure Stream Properties**:\n - **Endpoint**: Choose your configuration method:\n - **For manual stream configuration**: Enter the DCE Logs Ingestion URI (format: `https://..ingest.monitor.azure.com`)\n - **For auto stream detection**: Enter the DCE Resource ID (format: `/subscriptions//resourceGroups//providers/Microsoft.Insights/dataCollectionEndpoints/`)\n - **Streams**: Select **Auto** for automatic stream detection, or configure specific streams if needed\n\n5. **Verify Data Ingestion in Microsoft Sentinel data lake**:\n - Return to your **Log Analytics Workspace**\n - Run sample queries on the ASIM tables to confirm data is being received:\n ```kql\n ASimNetworkSessionLogs\n | where TimeGenerated > ago(1h)\n | take 10\n ```\n - Check the **Microsoft Sentinel Overview** dashboard for new data sources and event counts.""}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": false}}], ""customs"": [{""name"": ""App Registration or Azure Managed Identity"", ""description"": ""VirtualMetric 
DataStream requires an Entra ID identity to authenticate and send logs to Microsoft Sentinel data lake. You can choose between creating an App Registration with Client ID and Client Secret, or using Azure Managed Identity for enhanced security without credential management.""}, {""name"": ""Resource Group Role Assignment"", ""description"": ""The chosen identity (App Registration or Managed Identity) must be assigned to the resource group containing the Data Collection Endpoint with the following roles: Monitoring Metrics Publisher (for log ingestion) and Monitoring Reader (for reading stream configuration).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VirtualMetric%20DataStream/Data%20Connectors/VirtualMetric-SentinelDataLake/Template_SentinelDataLake.json","true" -"","VirusTotal","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VirusTotal","azuresentinel","azure-sentinel-solution-virustotal","2022-07-31","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"","Votiro","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Votiro","votirocybersecltd1670174946024","votiro_data_connector","","","","Votiro","Partner","https://support.votiro.com/","","domains","","","","","","","false","","false" -"CommonSecurityLog","Votiro","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Votiro","votirocybersecltd1670174946024","votiro_data_connector","","","","Votiro","Partner","https://support.votiro.com/","","domains","Votiro","Votiro","[Deprecated] Votiro Sanitization Engine Logs","The Votiro data connector allows you to easily connect your Votiro Event logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. Using Votiro on Microsoft Sentinel will provide you more insights into the sanitization results of files.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel; this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python --version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set Votiro Endpoints to send Syslog messages in CEF format to the Forwarder machine. Make sure to send the logs to port 514 TCP on the Forwarder machine's IP address.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python --version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Votiro/Data%20Connectors/VotiroEvents.json","true" -"","Watchguard Firebox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Watchguard%20Firebox","watchguard-technologies","watchguard_firebox_mss","2022-05-06","","","WatchGuard","Partner","https://www.watchguard.com/wgrd-support/contact-support","","domains","","","","","","","false","","false" -"Syslog","Watchguard Firebox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Watchguard%20Firebox","watchguard-technologies","watchguard_firebox_mss","2022-05-06","","","WatchGuard","Partner","https://www.watchguard.com/wgrd-support/contact-support","","domains","WatchguardFirebox","WatchGuard Technologies","[Deprecated] WatchGuard Firebox","WatchGuard Firebox (https://www.watchguard.com/wgrd-products/firewall-appliances and https://www.watchguard.com/wgrd-products/cloud-and-virtual-firewalls) is a security product/firewall-appliance. Watchguard Firebox will send syslog to the Watchguard Firebox collector agent. The agent then sends the message to the workspace.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias WatchGuardFirebox and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Watchguard%20Firebox/Parsers/WatchGuardFirebox.txt) on the second line of the query, enter the hostname(s) of your WatchGuard Firebox device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n2. Select **Apply below configuration to my machines** and select the facilities and severities.\n3. 
Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Watchguard%20Firebox/Data%20Connectors/Connector_syslog_WatchGuardFirebox.json","true" -"","Watchlists Utilities","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Watchlists%20Utilities","azuresentinel","azure-sentinel-solution-watchlistsutilities","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"","Web Session Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Web%20Session%20Essentials","azuresentinel","azure-sentinel-solution-websession-domain","2023-06-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","Web Shells Threat Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Web%20Shells%20Threat%20Protection","azuresentinel","azure-sentinel-solution-webshellsthreatprotection","2022-05-22","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"","Windows Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Firewall","azuresentinel","azure-sentinel-solution-windowsfirewall","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"WindowsFirewall","Windows 
Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Firewall","azuresentinel","azure-sentinel-solution-windowsfirewall","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","WindowsFirewall","Microsoft","Windows Firewall","Windows Firewall is a Microsoft Windows application that filters information coming to your system from the Internet and blocking potentially harmful programs. The software blocks most programs from communicating through the firewall. Users simply add a program to the list of allowed programs to allow it to communicate through the firewall. When using a public network, Windows Firewall can also secure the system by blocking all unsolicited attempts to connect to your computer. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219791&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""1. Download and install the agent"", ""description"": ""> Windows Firewall logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on non-Azure Windows Machine"", ""description"": ""Select the machine to install the agent and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Install Windows Firewall solution"", ""instructions"": [{""parameters"": {""solutionName"": ""WindowsFirewall""}, ""type"": ""OmsSolutions""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/solutions"", ""permissionsDisplayText"": ""[read and write permissions](https://docs.microsoft.com/azure/role-based-access-control/built-in-roles#log-analytics-contributor)."", ""providerDisplayName"": ""Solutions"", ""scope"": ""ResourceGroup"", ""requiredPermissions"": {""read"": true, ""write"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Firewall/Data%20Connectors/Windows%20Firewall.JSON","true" -"","Windows Forwarded Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Forwarded%20Events","azuresentinel","azure-sentinel-solution-windowsforwardedevents","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"WindowsEvent","Windows Forwarded Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Forwarded%20Events","azuresentinel","azure-sentinel-solution-windowsforwardedevents","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","WindowsForwardedEvents","Microsoft","Windows Forwarded Events","You can stream all Windows Event Forwarding (WEF) logs from the Windows Servers connected to your Microsoft Sentinel workspace using Azure Monitor Agent (AMA).
This connection enables you to view dashboards, create custom alerts, and improve investigation.
This gives you more insight into your organization’s network and improves your security operation capabilities. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219963&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Enable data collection rule\u200b"", ""description"": ""> Windows Forwarded Events logs are collected only from **Windows** agents."", ""instructions"": [{""type"": ""WindowsForwardedEvents""}, {""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 1}, ""type"": ""InstallAgent""}, {""parameters"": {""linkType"": ""OpenCustomDeploymentBlade"", ""dataCollectionRuleType"": 1}, ""type"": ""InstallAgent""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces/datasources"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace data sources"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Forwarded%20Events/Data%20Connectors/WindowsForwardedEvents.JSON","true" -"","Windows Security Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Security%20Events","azuresentinel","azure-sentinel-solution-securityevents","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"SecurityEvent","Windows Security Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Security%20Events","azuresentinel","azure-sentinel-solution-securityevents","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SecurityEvents","Microsoft","Security Events via Legacy Agent","You can stream all security events from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization’s network and improves your security operation capabilities. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220093&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""1. 
Download and install the agent"", ""description"": ""> Security Events logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on non-Azure Windows Machine"", ""description"": ""Select the machine to install the agent and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Select which events to stream"", ""description"": ""- All events - All Windows security and AppLocker events.\n- Common - A standard set of events for auditing purposes.\n- Minimal - A small set of events that might indicate potential threats. 
By enabling this option, you won't be able to have a full audit trail.\n- None - No security or AppLocker events."", ""instructions"": [{""type"": ""SecurityEvents""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/datasources"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace data sources"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Security%20Events/Data%20Connectors/template_SecurityEvents.JSON","true" -"SecurityEvent","Windows Security Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Security%20Events","azuresentinel","azure-sentinel-solution-securityevents","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","WindowsSecurityEvents","Microsoft","Windows Security Events via AMA","You can stream all security events from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization’s network and improves your security operation capabilities. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220225&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Enable data collection rule\u200b"", ""description"": ""> Security Events logs are collected only from **Windows** agents."", ""instructions"": [{""type"": ""WindowsSecurityEvents""}]}, {""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 0}, ""type"": ""InstallAgent""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces/datasources"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace data sources"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Security%20Events/Data%20Connectors/template_WindowsSecurityEvents.JSON","true" -"","Windows Server DNS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Server%20DNS","azuresentinel","azure-sentinel-solution-dns","2022-05-11","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"DnsInventory","Windows Server DNS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Server%20DNS","azuresentinel","azure-sentinel-solution-dns","2022-05-11","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","DNS","Microsoft","DNS","The DNS log connector allows you to easily connect your DNS analytic and audit logs with Microsoft Sentinel, and 
other related data, to improve investigation.

**When you enable DNS log collection you can:**
- Identify clients that try to resolve malicious domain names.
- Identify stale resource records.
- Identify frequently queried domain names and talkative DNS clients.
- View request load on DNS servers.
- View dynamic DNS registration failures.

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2220127&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""1. Download and install the agent"", ""description"": ""> DNS logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on non-Azure Windows Machine"", ""description"": ""Select the machine to install the agent and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Install DNS solution"", ""instructions"": [{""parameters"": {""solutionName"": ""DnsAnalytics""}, ""type"": ""OmsSolutions""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/solutions"", ""permissionsDisplayText"": ""[read and write permissions](https://docs.microsoft.com/azure/role-based-access-control/built-in-roles#log-analytics-contributor)."", ""providerDisplayName"": ""Solutions"", ""scope"": ""ResourceGroup"", ""requiredPermissions"": {""read"": true, ""write"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Server%20DNS/Data%20Connectors/template_DNS.JSON","true" -"","WireX Network Forensics Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/WireX%20Network%20Forensics%20Platform","wirexsystems1584682625009","wirex_network_forensics_platform_mss","2022-05-06","","","WireX Systems","Partner","https://wirexsystems.com/contact-us/","","domains","","","","","","","false","","false" -"CommonSecurityLog","WireX Network Forensics Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/WireX%20Network%20Forensics%20Platform","wirexsystems1584682625009","wirex_network_forensics_platform_mss","2022-05-06","","","WireX Systems","Partner","https://wirexsystems.com/contact-us/","","domains","WireX_Systems_NFP","WireX_Systems","[Deprecated] WireX Network Forensics Platform via Legacy Agent","The WireX Systems data connector allows security professional to integrate with Microsoft Sentinel to allow you to further enrich your forensics investigations; to not only encompass the contextual content offered by WireX but to analyze data from other sources, and to create custom dashboards to give the most 
complete picture during a forensic investigation and to create custom workflows.","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Contact WireX support (https://wirexsystems.com/contact-us/) in order to configure your NFP solution to send Syslog messages in CEF format to the proxy machine. Make sure that they central manager can send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/WireX%20Network%20Forensics%20Platform/Data%20Connectors/WireXsystemsNFP%281b%29.json","true" -"CommonSecurityLog","WireX Network Forensics Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/WireX%20Network%20Forensics%20Platform","wirexsystems1584682625009","wirex_network_forensics_platform_mss","2022-05-06","","","WireX Systems","Partner","https://wirexsystems.com/contact-us/","","domains","WireX_Systems_NFPAma","WireX_Systems","[Deprecated] WireX Network Forensics Platform via AMA","The WireX Systems data connector allows security professional to integrate with Microsoft Sentinel to allow you to further enrich your forensics investigations; to not only encompass the contextual content offered by WireX but to analyze data from other sources, and to create custom dashboards to give the most complete picture during a forensic investigation and to create custom workflows.","[{""title"": """", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine""}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Contact WireX support (https://wirexsystems.com/contact-us/) in order to configure your NFP solution to send Syslog messages in CEF format to the proxy machine. Make sure that they central manager can send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/WireX%20Network%20Forensics%20Platform/Data%20Connectors/template_WireXsystemsNFPAMA.json","true" -"","WithSecureElementsViaConnector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/WithSecureElementsViaConnector","withsecurecorporation","sentinel-solution-withsecure-via-connector","2022-11-03","2022-11-03","","WithSecure","Partner","https://www.withsecure.com/en/support","","domains","","","","","","","false","","false" -"CommonSecurityLog","WithSecureElementsViaConnector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/WithSecureElementsViaConnector","withsecurecorporation","sentinel-solution-withsecure-via-connector","2022-11-03","2022-11-03","","WithSecure","Partner","https://www.withsecure.com/en/support","","domains","WithSecureElementsViaConnector","WithSecure","[Deprecated] WithSecure Elements via Connector","WithSecure Elements is a unified cloud-based cyber security platform.
By connecting WithSecure Elements via Connector to Microsoft Sentinel, security events can be received in Common Event Format (CEF) over syslog.
It requires deploying ""Elements Connector"" either on-prem or in cloud.
The Common Event Format (CEF) provides natively search & correlation, alerting and threat intelligence enrichment for each data log.","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your WithSecurity solution and Sentinel. The machine can be on-prem environment, Microsoft Azure or other cloud based.\n> Linux needs to have `syslog-ng` and `python`/`python3` installed.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. 
You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""For python3 use command below:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python3 cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward data from WithSecure Elements Connector to Syslog agent"", ""description"": ""This describes how to install and configure Elements Connector step by step."", ""innerSteps"": [{""title"": ""2.1 Order Connector subscription"", ""description"": ""If Connector subscription has not been ordered yet go to EPP in Elements Portal. Then navigate to Downloads and in Elements Connector section click 'Create subscription key' button. You can check Your subscription key in Subscriptions.""}, {""title"": ""2.2 Download Connector"", ""description"": ""Go to Downloads and in WithSecure Elements Connector section select correct installer.""}, {""title"": ""2.3 Create management API key"", ""description"": ""When in EPP open account settings in top right corner. Then select Get management API key. 
If key has been created earlier it can be read there as well.""}, {""title"": ""2.4 Install Connector"", ""description"": ""To install Elements Connector follow [Elements Connector Docs](https://www.withsecure.com/userguides/product.html#business/connector/latest/en/).""}, {""title"": ""2.5 Configure event forwarding"", ""description"": ""If api access has not been configured during installation follow [Configuring API access for Elements Connector](https://www.withsecure.com/userguides/product.html#business/connector/latest/en/task_F657F4D0F2144CD5913EE510E155E234-latest-en).\nThen go to EPP, then Profiles, then use For Connector from where you can see the connector profiles. Create a new profile (or edit an existing not read-only profile). In Event forwarding enable it. SIEM system address: **127.0.0.1:514**. Set format to **Common Event Format**. Protocol is **TCP**. Save profile and assign it to Elements Connector in Devices tab.""}]}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""For python3 use command below:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python3 cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/WithSecureElementsViaConnector/Data%20Connectors/WithSecureElementsViaConnector.json","true" -"","WithSecureElementsViaFunction","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/WithSecureElementsViaFunction","withsecurecorporation","sentinel-solution-withsecure-via-function","2024-02-22","2025-04-25","","WithSecure","Partner","https://www.withsecure.com/en/support","","domains","","","","","","","false","","false" -"WsSecurityEvents_CL","WithSecureElementsViaFunction","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/WithSecureElementsViaFunction","withsecurecorporation","sentinel-solution-withsecure-via-function","2024-02-22","2025-04-25","","WithSecure","Partner","https://www.withsecure.com/en/support","","domains","WithSecureElementsViaFunction","WithSecure","WithSecure Elements API (Azure Function)","WithSecure Elements is the unified cloud-based cyber security platform designed to reduce risk, complexity, and inefficiency.

Elevate your security from your endpoints to your cloud applications. Arm yourself against every type of cyber threat, from targeted attacks to zero-day ransomware.

WithSecure Elements combines powerful predictive, preventive, and responsive security capabilities - all managed and monitored through a single security center. Our modular structure and flexible pricing models give you the freedom to evolve. With our expertise and insight, you'll always be empowered - and you'll never be alone.

With Microsoft Sentinel integration, you can correlate [security events](https://connect.withsecure.com/api-reference/security-events#overview) data from the WithSecure Elements solution with data from other sources, enabling a rich overview of your entire environment and faster reaction to threats.

With this solution Azure Function is deployed to your tenant, polling periodically for the WithSecure Elements security events.

For more information visit our website at: [https://www.withsecure.com](https://www.withsecure.com).","[{""title"": ""1. Create WithSecure Elements API credentials"", ""description"": ""Follow the [user guide](https://connect.withsecure.com/getting-started/elements#getting-client-credentials) to create Elements API credentials. Save credentials in a safe place.""}, {""title"": ""2. Create Microsoft Entra application"", ""description"": ""Create new Microsoft Entra application and credentials. Follow [the instructions](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-microsoft-entra-application) and store values of **Directory (tenant) ID**, **Object ID**, **Application (client) ID** and **Client Secret** (from client credentials field). Remember to store Client Secret in a safe place.""}, {""title"": ""3. Deploy Function App"", ""description"": "">**NOTE:** This connector uses Azure Functions to pull logs from WithSecure Elements. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store Microsoft Entra client credentials and WithSecure Elements API client credentials in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**IMPORTANT:** Before deploying the WithSecure Elements connector, have the Workspace Name (can be copied from the following), data from Microsoft Entra (Directory (tenant) ID, Object ID, Application (client) ID and Client Secret), as well as the WithSecure Elements client credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""workspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Deploy all the resources related to the connector"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-WithSecureElementsViaFunction-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Entra Client ID**, **Entra Client Secret**, **Entra Tenant ID**, **Elements API Client ID**, **Elements API Client Secret**.\n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. You can also fill in optional fields: **Elements API url**, **Engine**, **Engine Group**. Use default value of **Elements API url** unless you have some special case. **Engine** and **Engine Group** map to [security events request parameters](https://connect.withsecure.com/api-reference/elements#post-/security-events/v1/security-events), fill in those parameters if you are interested only in events from specific engine or engine group, in case you want to receive all security events leave the fields with default values.\n5. 
Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n6. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""WithSecure Elements API client credentials"", ""description"": ""Client credentials are required. [See the documentation to learn more.](https://connect.withsecure.com/getting-started/elements#getting-client-credentials)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/WithSecureElementsViaFunction/Data%20Connectors/WithSecureElementsViaFunction.json","true" -"","Wiz","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz","wizinc1627338511749","wizinc1627338511749_wiz_mss-sentinel","2023-06-20","","","Wiz","Partner","https://support.wiz.io/","","domains","","","","","","","false","","false" -"WizAuditLogsV2_CL","Wiz","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz","wizinc1627338511749","wizinc1627338511749_wiz_mss-sentinel","2023-06-20","","","Wiz","Partner","https://support.wiz.io/","","domains","Wiz","Wiz","Wiz","The Wiz connector allows you to easily send Wiz Issues, Vulnerability Findings, and Audit logs to Microsoft Sentinel.","[{""description"": "">**NOTE:** This connector: Uses Azure Functions to connect to Wiz API to pull Wiz Issues, Vulnerability Findings, and Audit Logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\nCreates an Azure Key Vault with all the required parameters stored as secrets.""}, {""description"": ""\nFollow the instructions on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz) to get the erquired credentials."", ""title"": ""STEP 1 - Get your Wiz credentials""}, {""description"": ""\n>**IMPORTANT:** Before deploying the Wiz Connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Wiz credentials from the previous step."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}], ""title"": ""STEP 2 - Deploy the connector and the associated Azure Function""}, {""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-wiz-azuredeploy) \n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the following parameters: \n> - Choose **KeyVaultName** and **FunctionName** for the new resources \n >- Enter the following Wiz credentials from step 1: **WizAuthUrl**, **WizEndpointUrl**, **WizClientId**, and **WizClientSecret** \n>- Enter the Workspace credentials **AzureLogsAnalyticsWorkspaceId** and **AzureLogAnalyticsWorkspaceSharedKey**\n>- Choose the Wiz data types you want to send to Microsoft Sentinel, choose at least one from **Wiz Issues**, **Vulnerability Findings**, and **Audit Logs**.\n \n>- (optional) follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#optional-create-a-filter-for-wiz-queries) to add **IssuesQueryFilter**, **VulnerbailitiesQueryFilter**, and **AuditLogsQueryFilter**.\n \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n"", ""title"": ""Option 1: Deploy using the Azure Resource Manager (ARM) Template""}, {""description"": "">Follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#manual-deployment) to deploy the connector manually."", ""title"": ""Option 2: Manual Deployment of the Azure Function""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Wiz Service Account credentials"", ""description"": ""Ensure you have your Wiz service account client ID and client secret, API endpoint URL, and auth URL. Instructions can be found on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz/Data%20Connectors/template_WIZ.json","true" -"WizAuditLogs_CL","Wiz","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz","wizinc1627338511749","wizinc1627338511749_wiz_mss-sentinel","2023-06-20","","","Wiz","Partner","https://support.wiz.io/","","domains","Wiz","Wiz","Wiz","The Wiz connector allows you to easily send Wiz Issues, Vulnerability Findings, and Audit logs to Microsoft Sentinel.","[{""description"": "">**NOTE:** This connector: Uses Azure Functions to connect to Wiz API to pull Wiz Issues, Vulnerability Findings, and Audit Logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\nCreates an Azure Key Vault with all the required parameters stored as secrets.""}, {""description"": ""\nFollow the instructions on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz) to get the erquired credentials."", ""title"": ""STEP 1 - Get your Wiz credentials""}, {""description"": ""\n>**IMPORTANT:** Before deploying the Wiz Connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Wiz credentials from the previous step."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}], ""title"": ""STEP 2 - Deploy the connector and the associated Azure Function""}, {""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-wiz-azuredeploy) \n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the following parameters: \n> - Choose **KeyVaultName** and **FunctionName** for the new resources \n >- Enter the following Wiz credentials from step 1: **WizAuthUrl**, **WizEndpointUrl**, **WizClientId**, and **WizClientSecret** \n>- Enter the Workspace credentials **AzureLogsAnalyticsWorkspaceId** and **AzureLogAnalyticsWorkspaceSharedKey**\n>- Choose the Wiz data types you want to send to Microsoft Sentinel, choose at least one from **Wiz Issues**, **Vulnerability Findings**, and **Audit Logs**.\n \n>- (optional) follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#optional-create-a-filter-for-wiz-queries) to add **IssuesQueryFilter**, **VulnerbailitiesQueryFilter**, and **AuditLogsQueryFilter**.\n \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n"", ""title"": ""Option 1: Deploy using the Azure Resource Manager (ARM) Template""}, {""description"": "">Follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#manual-deployment) to deploy the connector manually."", ""title"": ""Option 2: Manual Deployment of the Azure Function""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Wiz Service Account credentials"", ""description"": ""Ensure you have your Wiz service account client ID and client secret, API endpoint URL, and auth URL. Instructions can be found on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz/Data%20Connectors/template_WIZ.json","true" -"WizIssuesV2_CL","Wiz","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz","wizinc1627338511749","wizinc1627338511749_wiz_mss-sentinel","2023-06-20","","","Wiz","Partner","https://support.wiz.io/","","domains","Wiz","Wiz","Wiz","The Wiz connector allows you to easily send Wiz Issues, Vulnerability Findings, and Audit logs to Microsoft Sentinel.","[{""description"": "">**NOTE:** This connector: Uses Azure Functions to connect to Wiz API to pull Wiz Issues, Vulnerability Findings, and Audit Logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\nCreates an Azure Key Vault with all the required parameters stored as secrets.""}, {""description"": ""\nFollow the instructions on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz) to get the erquired credentials."", ""title"": ""STEP 1 - Get your Wiz credentials""}, {""description"": ""\n>**IMPORTANT:** Before deploying the Wiz Connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Wiz credentials from the previous step."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}], ""title"": ""STEP 2 - Deploy the connector and the associated Azure Function""}, {""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-wiz-azuredeploy) \n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the following parameters: \n> - Choose **KeyVaultName** and **FunctionName** for the new resources \n >- Enter the following Wiz credentials from step 1: **WizAuthUrl**, **WizEndpointUrl**, **WizClientId**, and **WizClientSecret** \n>- Enter the Workspace credentials **AzureLogsAnalyticsWorkspaceId** and **AzureLogAnalyticsWorkspaceSharedKey**\n>- Choose the Wiz data types you want to send to Microsoft Sentinel, choose at least one from **Wiz Issues**, **Vulnerability Findings**, and **Audit Logs**.\n \n>- (optional) follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#optional-create-a-filter-for-wiz-queries) to add **IssuesQueryFilter**, **VulnerbailitiesQueryFilter**, and **AuditLogsQueryFilter**.\n \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n"", ""title"": ""Option 1: Deploy using the Azure Resource Manager (ARM) Template""}, {""description"": "">Follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#manual-deployment) to deploy the connector manually."", ""title"": ""Option 2: Manual Deployment of the Azure Function""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Wiz Service Account credentials"", ""description"": ""Ensure you have your Wiz service account client ID and client secret, API endpoint URL, and auth URL. Instructions can be found on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz/Data%20Connectors/template_WIZ.json","true" -"WizIssues_CL","Wiz","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz","wizinc1627338511749","wizinc1627338511749_wiz_mss-sentinel","2023-06-20","","","Wiz","Partner","https://support.wiz.io/","","domains","Wiz","Wiz","Wiz","The Wiz connector allows you to easily send Wiz Issues, Vulnerability Findings, and Audit logs to Microsoft Sentinel.","[{""description"": "">**NOTE:** This connector: Uses Azure Functions to connect to Wiz API to pull Wiz Issues, Vulnerability Findings, and Audit Logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\nCreates an Azure Key Vault with all the required parameters stored as secrets.""}, {""description"": ""\nFollow the instructions on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz) to get the erquired credentials."", ""title"": ""STEP 1 - Get your Wiz credentials""}, {""description"": ""\n>**IMPORTANT:** Before deploying the Wiz Connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Wiz credentials from the previous step."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}], ""title"": ""STEP 2 - Deploy the connector and the associated Azure Function""}, {""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-wiz-azuredeploy) \n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the following parameters: \n> - Choose **KeyVaultName** and **FunctionName** for the new resources \n >- Enter the following Wiz credentials from step 1: **WizAuthUrl**, **WizEndpointUrl**, **WizClientId**, and **WizClientSecret** \n>- Enter the Workspace credentials **AzureLogsAnalyticsWorkspaceId** and **AzureLogAnalyticsWorkspaceSharedKey**\n>- Choose the Wiz data types you want to send to Microsoft Sentinel, choose at least one from **Wiz Issues**, **Vulnerability Findings**, and **Audit Logs**.\n \n>- (optional) follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#optional-create-a-filter-for-wiz-queries) to add **IssuesQueryFilter**, **VulnerbailitiesQueryFilter**, and **AuditLogsQueryFilter**.\n \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n"", ""title"": ""Option 1: Deploy using the Azure Resource Manager (ARM) Template""}, {""description"": "">Follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#manual-deployment) to deploy the connector manually."", ""title"": ""Option 2: Manual Deployment of the Azure Function""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Wiz Service Account credentials"", ""description"": ""Ensure you have your Wiz service account client ID and client secret, API endpoint URL, and auth URL. Instructions can be found on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz/Data%20Connectors/template_WIZ.json","true" -"WizVulnerabilitiesV2_CL","Wiz","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz","wizinc1627338511749","wizinc1627338511749_wiz_mss-sentinel","2023-06-20","","","Wiz","Partner","https://support.wiz.io/","","domains","Wiz","Wiz","Wiz","The Wiz connector allows you to easily send Wiz Issues, Vulnerability Findings, and Audit logs to Microsoft Sentinel.","[{""description"": "">**NOTE:** This connector: Uses Azure Functions to connect to Wiz API to pull Wiz Issues, Vulnerability Findings, and Audit Logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\nCreates an Azure Key Vault with all the required parameters stored as secrets.""}, {""description"": ""\nFollow the instructions on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz) to get the erquired credentials."", ""title"": ""STEP 1 - Get your Wiz credentials""}, {""description"": ""\n>**IMPORTANT:** Before deploying the Wiz Connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Wiz credentials from the previous step."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}], ""title"": ""STEP 2 - Deploy the connector and the associated Azure Function""}, {""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-wiz-azuredeploy) \n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the following parameters: \n> - Choose **KeyVaultName** and **FunctionName** for the new resources \n >- Enter the following Wiz credentials from step 1: **WizAuthUrl**, **WizEndpointUrl**, **WizClientId**, and **WizClientSecret** \n>- Enter the Workspace credentials **AzureLogsAnalyticsWorkspaceId** and **AzureLogAnalyticsWorkspaceSharedKey**\n>- Choose the Wiz data types you want to send to Microsoft Sentinel, choose at least one from **Wiz Issues**, **Vulnerability Findings**, and **Audit Logs**.\n \n>- (optional) follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#optional-create-a-filter-for-wiz-queries) to add **IssuesQueryFilter**, **VulnerbailitiesQueryFilter**, and **AuditLogsQueryFilter**.\n \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n"", ""title"": ""Option 1: Deploy using the Azure Resource Manager (ARM) Template""}, {""description"": "">Follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#manual-deployment) to deploy the connector manually."", ""title"": ""Option 2: Manual Deployment of the Azure Function""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Wiz Service Account credentials"", ""description"": ""Ensure you have your Wiz service account client ID and client secret, API endpoint URL, and auth URL. Instructions can be found on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz/Data%20Connectors/template_WIZ.json","true" -"WizVulnerabilities_CL","Wiz","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz","wizinc1627338511749","wizinc1627338511749_wiz_mss-sentinel","2023-06-20","","","Wiz","Partner","https://support.wiz.io/","","domains","Wiz","Wiz","Wiz","The Wiz connector allows you to easily send Wiz Issues, Vulnerability Findings, and Audit logs to Microsoft Sentinel.","[{""description"": "">**NOTE:** This connector: Uses Azure Functions to connect to Wiz API to pull Wiz Issues, Vulnerability Findings, and Audit Logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\nCreates an Azure Key Vault with all the required parameters stored as secrets.""}, {""description"": ""\nFollow the instructions on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz) to get the erquired credentials."", ""title"": ""STEP 1 - Get your Wiz credentials""}, {""description"": ""\n>**IMPORTANT:** Before deploying the Wiz Connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Wiz credentials from the previous step."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}], ""title"": ""STEP 2 - Deploy the connector and the associated Azure Function""}, {""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-wiz-azuredeploy) \n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the following parameters: \n> - Choose **KeyVaultName** and **FunctionName** for the new resources \n >- Enter the following Wiz credentials from step 1: **WizAuthUrl**, **WizEndpointUrl**, **WizClientId**, and **WizClientSecret** \n>- Enter the Workspace credentials **AzureLogsAnalyticsWorkspaceId** and **AzureLogAnalyticsWorkspaceSharedKey**\n>- Choose the Wiz data types you want to send to Microsoft Sentinel, choose at least one from **Wiz Issues**, **Vulnerability Findings**, and **Audit Logs**.\n \n>- (optional) follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#optional-create-a-filter-for-wiz-queries) to add **IssuesQueryFilter**, **VulnerbailitiesQueryFilter**, and **AuditLogsQueryFilter**.\n \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n"", ""title"": ""Option 1: Deploy using the Azure Resource Manager (ARM) Template""}, {""description"": "">Follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#manual-deployment) to deploy the connector manually."", ""title"": ""Option 2: Manual Deployment of the Azure Function""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Wiz Service Account credentials"", ""description"": ""Ensure you have your Wiz service account client ID and client secret, API endpoint URL, and auth URL. Instructions can be found on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz/Data%20Connectors/template_WIZ.json","true" -"","Workday","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Workday","azuresentinel","azure-sentinel-solution-workday","2024-02-15","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","Workplace from Facebook","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Workplace%20from%20Facebook","azuresentinel","azure-sentinel-solution-workplacefromfacebook","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"Workplace_Facebook_CL","Workplace from Facebook","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Workplace%20from%20Facebook","azuresentinel","azure-sentinel-solution-workplacefromfacebook","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","WorkplaceFacebook","Facebook","Workplace from Facebook","The 
[Workplace](https://www.workplace.com/) data connector provides the capability to ingest common Workplace events into Microsoft Sentinel through Webhooks. Webhooks enable custom integration apps to subscribe to events in Workplace and receive updates in real time. When a change occurs in Workplace, an HTTPS POST request with event information is sent to a callback data connector URL. Refer to [Webhooks documentation](https://developers.facebook.com/docs/workplace/reference/webhooks) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This data connector uses Azure Functions based on HTTP Trigger for waiting POST requests with logs to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias WorkplaceFacebook and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Workplace%20from%20Facebook/Parsers/Workplace_Facebook.txt) on the second line of the query, enter the hostname(s) of your Workplace Facebook device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Workplace**\n\n Follow the instructions to configure Webhooks.\n\n1. Log in to the Workplace with Admin user credentials.\n2. In the Admin panel, click **Integrations**.\n3. In the **All integrations** view, click **Create custom integration**\n4. Enter the name and description and click **Create**.\n5. In the **Integration details** panel show **App secret** and copy.\n6. In the **Integration permissions** pannel set all read permissions. Refer to [permission page](https://developers.facebook.com/docs/workplace/reference/permissions) for details.\n7. Now proceed to STEP 2 to follow the steps (listed in Option 1 or 2) to Deploy the Azure Function.\n8. Enter the requested parameters and also enter a Token of choice. Copy this Token / Note it for the upcoming step.\n9. After the deployment of Azure Functions completes successfully, open Function App page, select your app, go to **Functions**, click **Get Function URL** and copy this / Note it for the upcoming step.\n10. Go back to Workplace from Facebook. In the **Configure webhooks** panel on each Tab set **Callback URL** as the same value that you copied in point 9 above and Verify token as the same\n value you copied in point 8 above which was obtained during STEP 2 of Azure Functions deployment.\n11. 
Click Save.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Workplace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Workplace data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-WorkplaceFacebook-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-WorkplaceFacebook-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **WorkplaceVerifyToken** (can be any expression, copy and save it for STEP 1), **WorkplaceAppSecret** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n6. 
After deploying open Function App page, select your app, go to the **Functions** and click **Get Function Url** copy it and follow p.7 from STEP 1.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Sophos Endpoint Protection data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-WorkplaceFacebook-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkplaceAppSecret\n\t\tWorkplaceVerifyToken\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Webhooks Credentials/permissions"", ""description"": ""WorkplaceAppSecret, WorkplaceVerifyToken, Callback URL are required for working Webhooks. See the documentation to learn more about [configuring Webhooks](https://developers.facebook.com/docs/workplace/reference/webhooks), [configuring permissions](https://developers.facebook.com/docs/workplace/reference/permissions). 
""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Workplace%20from%20Facebook/Data%20Connectors/WorkplaceFacebook/WorkplaceFacebook_Webhooks_FunctionApp.json","true" -"","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","","","","","","","false","","false" -"ZeroFoxAlertPoller_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxAlertsDefinition","ZeroFox Enterprise","ZeroFox Enterprise - Alerts (Polling CCF)","Collects alerts from ZeroFox API.","[{""description"": ""Connect ZeroFox to Microsoft Sentinel"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Provide your ZeroFox PAT"", ""placeholder"": ""Zerofox PAT"", ""type"": ""password"", ""name"": ""apikey"", ""validations"": {""required"": true}}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Connect ZeroFox to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/solutions"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true, ""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""ZeroFox Personal Access Token (PAT)"", ""description"": ""A ZeroFox PAT is required. 
You can get it in Data Connectors > [API Data Feeds](https://cloud.zerofox.com/data_connectors/api).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/Alerts/ZeroFoxAlerts_ConnectorDefinition.json","true" -"ZeroFox_CTI_C2_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. 
\n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" -"ZeroFox_CTI_advanced_dark_web_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. 
Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" -"ZeroFox_CTI_botnet_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API 
to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. 
Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" -"ZeroFox_CTI_breaches_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. 
\n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" -"ZeroFox_CTI_compromised_credentials_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. 
Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" -"ZeroFox_CTI_credit_cards_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI 
REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. 
Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" -"ZeroFox_CTI_dark_web_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. 
\n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" -"ZeroFox_CTI_discord_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. 
Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" -"ZeroFox_CTI_disruption_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST 
API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. 
\n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" -"ZeroFox_CTI_email_addresses_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. 
\n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" -"ZeroFox_CTI_exploits_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. 
Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" -"ZeroFox_CTI_irc_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to 
pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. 
Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" -"ZeroFox_CTI_malware_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. 
\n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" -"ZeroFox_CTI_national_ids_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. 
Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" -"ZeroFox_CTI_phishing_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST 
API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. 
\n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" -"ZeroFox_CTI_phone_numbers_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. 
\n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" -"ZeroFox_CTI_ransomware_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. 
Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" -"ZeroFox_CTI_telegram_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST 
API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. 
\n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" -"ZeroFox_CTI_threat_actors_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. 
\n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" -"ZeroFox_CTI_vulnerabilities_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. 
Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" -"","ZeroNetworks","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroNetworks","zeronetworksltd1629013803351","azure-sentinel-solution-znsegmentaudit","2022-06-06","2025-09-17","","Zero Networks","Partner","https://zeronetworks.com","","domains","","","","","","","false","","false" -"ZNSegmentAuditNativePoller_CL","ZeroNetworks","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroNetworks","zeronetworksltd1629013803351","azure-sentinel-solution-znsegmentaudit","2022-06-06","2025-09-17","","Zero 
Networks","Partner","https://zeronetworks.com","","domains","ZeroNetworksSegmentAuditNativePoller","Zero Networks","Zero Networks Segment Audit","The [Zero Networks Segment](https://zeronetworks.com/) Audit data connector provides the capability to ingest Zero Networks Audit events into Microsoft Sentinel through the REST API. This data connector uses Microsoft Sentinel native polling capability.","[{""title"": ""Connect Zero Networks to Microsoft Sentinel"", ""description"": ""Enable Zero Networks audit Logs."", ""instructions"": [{""parameters"": {""enable"": ""true""}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Zero Networks API Token"", ""description"": ""**ZeroNetworksAPIToken** is required for REST API. 
See the API Guide and follow the instructions for obtaining credentials.""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroNetworks/Data%20Connectors/SegmentNativePollerConnector/azuredeploy_ZeroNetworks_Segment_native_poller_connector.json","true" -"","ZeroTrust(TIC3.0)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroTrust%28TIC3.0%29","azuresentinel","azure-sentinel-solution-zerotrust","2021-10-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"","Zimperium Mobile Threat Defense","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zimperium%20Mobile%20Threat%20Defense","zimperiuminc","zimperium_mobile_threat_defense_mss","2022-05-02","","","Zimperium","Partner","https://www.zimperium.com/support/","","domains","","","","","","","false","","false" -"ZimperiumMitigationLog_CL","Zimperium Mobile Threat Defense","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zimperium%20Mobile%20Threat%20Defense","zimperiuminc","zimperium_mobile_threat_defense_mss","2022-05-02","","","Zimperium","Partner","https://www.zimperium.com/support/","","domains","ZimperiumMtdAlerts","Zimperium","Zimperium Mobile Threat Defense","Zimperium Mobile Threat Defense connector gives you the ability to connect the Zimperium threat log with Microsoft Sentinel to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's mobile threat landscape and enhances your security operation capabilities.","[{""title"": ""Configure and connect Zimperium MTD"", ""description"": ""1. In zConsole, click **Manage** on the navigation bar.\n2. Click the **Integrations** tab.\n3. Click the **Threat Reporting** button and then the **Add Integrations** button.\n4. 
Create the Integration:\n - From the available integrations, select Microsoft Sentinel.\n - Enter your workspace id and primary key from the fields below, click **Next**.\n - Fill in a name for your Microsoft Sentinel integration.\n - Select a Filter Level for the threat data you wish to push to Microsoft Sentinel.\n - Click **Finish**\n5. For additional instructions, please refer to the [Zimperium customer support portal](https://support.zimperium.com)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zimperium%20Mobile%20Threat%20Defense/Data%20Connectors/Zimperium%20MTD%20Alerts.json","true" -"ZimperiumThreatLog_CL","Zimperium Mobile Threat Defense","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zimperium%20Mobile%20Threat%20Defense","zimperiuminc","zimperium_mobile_threat_defense_mss","2022-05-02","","","Zimperium","Partner","https://www.zimperium.com/support/","","domains","ZimperiumMtdAlerts","Zimperium","Zimperium Mobile Threat Defense","Zimperium Mobile Threat Defense connector gives you the ability to connect the Zimperium threat log with Microsoft Sentinel to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's mobile threat landscape and enhances your security operation capabilities.","[{""title"": ""Configure and connect Zimperium MTD"", ""description"": ""1. In zConsole, click **Manage** on the navigation bar.\n2. Click the **Integrations** tab.\n3. Click the **Threat Reporting** button and then the **Add Integrations** button.\n4. Create the Integration:\n - From the available integrations, select Microsoft Sentinel.\n - Enter your workspace id and primary key from the fields below, click **Next**.\n - Fill in a name for your Microsoft Sentinel integration.\n - Select a Filter Level for the threat data you wish to push to Microsoft Sentinel.\n - Click **Finish**\n5. 
For additional instructions, please refer to the [Zimperium customer support portal](https://support.zimperium.com)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zimperium%20Mobile%20Threat%20Defense/Data%20Connectors/Zimperium%20MTD%20Alerts.json","true" -"","Zinc Open Source","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zinc%20Open%20Source","azuresentinel","azure-sentinel-solution-zincopensource","2022-10-03","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" -"","ZoomReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZoomReports","azuresentinel","azure-sentinel-solution-zoomreports","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","false","","false" 
-"Zoom_CL","ZoomReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZoomReports","azuresentinel","azure-sentinel-solution-zoomreports","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","Zoom","Zoom","Zoom Reports","The [Zoom](https://zoom.us/) Reports data connector provides the capability to ingest [Zoom Reports](https://developers.zoom.us/docs/api/rest/reference/zoom-api/methods/#tag/Reports) events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://developers.zoom.us/docs/api/) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Zoom API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Zoom and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZoomReports/Parsers/Zoom.yaml). 
The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Zoom API**\n\n [Follow the instructions](https://developers.zoom.us/docs/internal-apps/create/) to obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Zoom Reports data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Zoom Audit data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ZoomAPI-azuredeployV2) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-ZoomAPI-azuredeployV2-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **AccountID**, **ClientID**, **ClientSecret**, **WorkspaceID**, **WorkspaceKey**, **Function Name** and click Review + create. \n4. 
Finally click **Create** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Zoom Reports data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-ZoomAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ZoomXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAccountID\n\t\tClientID\n\t\tClientSecret\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**AccountID**, **ClientID** and **ClientSecret** are required for Zoom API. [See the documentation to learn more about Zoom API](https://developers.zoom.us/docs/internal-apps/create/). [Follow the instructions for Zoom API configurations](https://aka.ms/sentinel-zoomreports-readme).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZoomReports/Data%20Connectors/ZoomReports_API_FunctionApp.json","true" -"","Zscaler Internet Access","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zscaler%20Internet%20Access","zscaler1579058425289","zscaler_internet_access_mss","2022-05-25","","","Zscaler","Partner","https://help.zscaler.com/submit-ticket-links","","domains","","","","","","","false","","false" -"CommonSecurityLog","Zscaler Internet Access","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zscaler%20Internet%20Access","zscaler1579058425289","zscaler_internet_access_mss","2022-05-25","","","Zscaler","Partner","https://help.zscaler.com/submit-ticket-links","","domains","Zscaler","Zscaler","[Deprecated] Zscaler via Legacy Agent","The Zscaler data connector allows you to easily connect your Zscaler Internet Access (ZIA) logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. Using Zscaler on Microsoft Sentinel will provide you more insights into your organization’s Internet usage, and will enhance its security operation capabilities.​","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python --version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set Zscaler product to send Syslog messages in CEF format to your Syslog agent. Make sure you to send the logs on port 514 TCP. \n\nGo to [Zscaler Microsoft Sentinel integration guide](https://aka.ms/ZscalerCEFInstructions) to learn more.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python --version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zscaler%20Internet%20Access/Data%20Connectors/template_Zscaler.JSON","true" -"CommonSecurityLog","Zscaler Internet Access","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zscaler%20Internet%20Access","zscaler1579058425289","zscaler_internet_access_mss","2022-05-25","","","Zscaler","Partner","https://help.zscaler.com/submit-ticket-links","","domains","ZscalerAma","Zscaler","[Deprecated] Zscaler via AMA","The Zscaler data connector allows you to easily connect your Zscaler Internet Access (ZIA) logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. Using Zscaler on Microsoft Sentinel will provide you more insights into your organization’s Internet usage, and will enhance its security operation capabilities.​","[{""title"": """", ""description"": """", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine""}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set Zscaler product to send Syslog messages in CEF format to your Syslog agent. Make sure you to send the logs on port 514 TCP. \n\nGo to [Zscaler Microsoft Sentinel integration guide](https://aka.ms/ZscalerCEFInstructions) to learn more.""}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zscaler%20Internet%20Access/Data%20Connectors/template_ZscalerAma.JSON","true" -"","Zscaler Private Access (ZPA)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zscaler%20Private%20Access%20%28ZPA%29","azuresentinel","azure-sentinel-solution-zscalerprivateaccess","2022-01-31","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","false","","false" -"ZPA_CL","Zscaler Private Access (ZPA)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zscaler%20Private%20Access%20%28ZPA%29","azuresentinel","azure-sentinel-solution-zscalerprivateaccess","2022-01-31","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ZscalerPrivateAccess","Zscaler","[Deprecated] Zscaler Private Access","The [Zscaler Private Access (ZPA)](https://help.zscaler.com/zpa/what-zscaler-private-access) data connector provides the capability to ingest [Zscaler Private Access events](https://help.zscaler.com/zpa/log-streaming-service) into Microsoft Sentinel. Refer to [Zscaler Private Access documentation](https://help.zscaler.com/zpa) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. 
[Follow these steps](https://aka.ms/sentinel-ZscalerPrivateAccess-parser) to create the Kusto Functions alias, **ZPAEvent**"", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using Zscaler Private Access version: 21.67.1"", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server where the Zscaler Private Access logs are forwarded.\n\n> Logs from Zscaler Private Access Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": 
""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Follow the configuration steps below to get Zscaler Private Access logs into Microsoft Sentinel. Refer to the [Azure Monitor Documentation](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-json) for more details on these steps.\nZscaler Private Access logs are delivered via Log Streaming Service (LSS). Refer to [LSS documentation](https://help.zscaler.com/zpa/about-log-streaming-service) for detailed information\n1. Configure [Log Receivers](https://help.zscaler.com/zpa/configuring-log-receiver). While configuring a Log Receiver, choose **JSON** as **Log Template**.\n2. Download config file [zpa.conf](https://aka.ms/sentinel-ZscalerPrivateAccess-conf) \n\t\twget -v https://aka.ms/sentinel-zscalerprivateaccess-conf -O zpa.conf\n3. Login to the server where you have installed Azure Log Analytics agent.\n4. Copy zpa.conf to the /etc/opt/microsoft/omsagent/**workspace_id**/conf/omsagent.d/ folder.\n5. Edit zpa.conf as follows:\n\n\t a. specify port which you have set your Zscaler Log Receivers to forward logs to (line 4)\n\n\t b. zpa.conf uses the port **22033** by default. Ensure this port is not being used by any other source on your server\n\n\t c. If you would like to change the default port for **zpa.conf** make sure that it should not get conflict with default AMA agent ports I.e.(For example CEF uses TCP port **25226** or **25224**) \n\n\t d. replace **workspace_id** with real value of your Workspace ID (lines 14,15,16,19)\n5. 
Save changes and restart the Azure Log Analytics agent for Linux service with the following command:\n\t\tsudo /opt/microsoft/omsagent/bin/service_control restart"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zscaler%20Private%20Access%20%28ZPA%29/Data%20Connectors/Connector_LogAnalytics_agent_Zscaler_ZPA.json","true" -"","archTIS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/archTIS","nucleuscyber","nc-protect-azure-sentinel-data-connector","2021-10-20","","","archTIS","Partner","https://www.archtis.com/nc-protect-support/","","domains","","","","","","","false","","false" -"NCProtectUAL_CL","archTIS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/archTIS","nucleuscyber","nc-protect-azure-sentinel-data-connector","2021-10-20","","","archTIS","Partner","https://www.archtis.com/nc-protect-support/","","domains","NucleusCyberNCProtect","archTIS","NC Protect","[NC Protect Data Connector (archtis.com)](https://info.archtis.com/get-started-with-nc-protect-sentinel-data-connector) provides the capability to ingest user activity logs and events into 
Microsoft Sentinel. The connector provides visibility into NC Protect user activity logs and events in Microsoft Sentinel to improve monitoring and investigation capabilities","[{""title"": """", ""description"": ""1. Install NC Protect into your Azure Tenancy\n2. Log into the NC Protect Administration site\n3. From the left hand navigation menu, select General -> User Activity Monitoring\n4. Tick the checkbox to Enable SIEM and click the Configure button\n5. Select Microsoft Sentinel as the Application and complete the configuration using the information below\n6. Click Save to activate the connection\n"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""NC Protect"", ""description"": ""You must have a running instance of NC Protect for O365. 
Please [contact us](https://www.archtis.com/data-discovery-classification-protection-software-secure-collaboration/).""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/archTIS/Data%20Connectors/NucleusCyberNCProtect.json","true" -"","iboss","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/iboss","iboss","iboss-sentinel-connector","2022-02-15","","","iboss","Partner","https://www.iboss.com/contact-us/","","domains","","","","","","","false","","false" -"CommonSecurityLog","iboss","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/iboss","iboss","iboss-sentinel-connector","2022-02-15","","","iboss","Partner","https://www.iboss.com/contact-us/","","domains","iboss","iboss","[Deprecated] iboss via Legacy Agent","The [iboss](https://www.iboss.com) data connector enables you to seamlessly connect your Threat Console to Microsoft Sentinel and enrich your instance with iboss URL event logs. Our logs are forwarded in Common Event Format (CEF) over Syslog and the configuration required can be completed on the iboss platform without the use of a proxy. Take advantage of our connector to garner critical data points and gain insight into security threats.","[{""title"": ""1. Configure a dedicated proxy Linux machine"", ""description"": ""If using the iboss gov environment or there is a preference to forward the logs to a dedicated proxy Linux machine, proceed with this step. 
In all other cases, please advance to step two."", ""innerSteps"": [{""title"": ""1.1 Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace""}, {""title"": ""1.2 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the dedicated proxy Linux machine between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.3 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n> 2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs"", ""description"": ""Set your Threat Console to send Syslog messages in CEF format to your Azure workspace. Make note of your Workspace ID and Primary Key within your Log Analytics Workspace (Select the workspace from the Log Analytics workspaces menu in the Azure portal. Then select Agents management in the Settings section). \n\n>1. 
Navigate to Reporting & Analytics inside your iboss Console\n\n>2. Select Log Forwarding -> Forward From Reporter\n\n>3. Select Actions -> Add Service\n\n>4. Toggle to Microsoft Sentinel as a Service Type and input your Workspace ID/Primary Key along with other criteria. If a dedicated proxy Linux machine has been configured, toggle to Syslog as a Service Type and configure the settings to point to your dedicated proxy Linux machine\n\n>5. Wait one to two minutes for the setup to complete\n\n>6. Select your Microsoft Sentinel Service and verify the Microsoft Sentinel Setup Status is Successful. If a dedicated proxy Linux machine has been configured, you may proceed with validating your connection""}, {""title"": ""3. Validate connection"", ""description"": ""Open Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace""}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy (Only applicable if a dedicated proxy Linux machine has been configured).\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/iboss/Data%20Connectors/iboss_cef.json","true" -"CommonSecurityLog","iboss","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/iboss","iboss","iboss-sentinel-connector","2022-02-15","","","iboss","Partner","https://www.iboss.com/contact-us/","","domains","ibossAma","iboss","iboss via AMA","The [iboss](https://www.iboss.com) data connector enables you to seamlessly connect your Threat Console to Microsoft Sentinel and enrich your instance with iboss URL event logs. Our logs are forwarded in Common Event Format (CEF) over Syslog and the configuration required can be completed on the iboss platform without the use of a proxy. Take advantage of our connector to garner critical data points and gain insight into security threats.","[{""title"": ""Configure AMA Data Connector"", ""description"": ""Steps to configure the iboss AMA Data Connector"", ""instructions"": [{""parameters"": {""title"": ""Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Gather Required Configuration Details in Azure Arc"", ""description"": ""1. Navigate to Azure Arc ---> Azure Arc Resources ---> Machines.\n\n2. Add a machine ---> Add a single server ---> Generate script.\n\n3. Select the resource group, this should be the same group as the Log Analytics Workspace for your Microsoft Sentinel instance you will be using\n\n4. Select a region and ensure it is in the same region as your Log Analytics Workspace\n\n5. Select Linux as Operating System\n\n6. Click Next\n\n7. 
Download the script and use this information for the next step when configuring your Microsoft Sentinel AMA integration iboss side.\n\n8. Navigate to the Log Analytics Workspace of your Microsoft Sentinel instance and find it's resource group, workspace name, and workspace id""}, {""title"": ""Step B. Forward Common Event Format (CEF) logs"", ""description"": ""Set your Threat Console to send Syslog messages in CEF format to your Azure workspace. (Ensure you have the information gathered from the previous section)\n\n>1. Navigate to the Integrations Marketplace inside your iboss Console\n\n>2. Select Microsoft Sentinel AMA Log Forwarding\n\n>3. Select Add Integration\n\n4. Use the information from the script and your log analytics workspace to configure the integration.\n\n5. Add the integration\n\n>6. An email with be sent to your iboss alerts email to authenticate. Please do so within five minutes\n\n7. After authenticating, wait 15 to 20 minutes and ensure the Microsoft Sentinel Status of your integration is successful.""}, {""title"": ""Step C. Validate connection"", ""description"": ""1. Follow the instructions to validate your connectivity:\n\n2. Open Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n3. It may take about 20 minutes until the connection streams data to your workspace.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/iboss/Data%20Connectors/template_ibossAMA.json","true" -"","vArmour Application Controller","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/vArmour%20Application%20Controller","varmournetworks","varmour_sentinel","2022-06-01","","","vArmour Networks","Partner","https://www.varmour.com/contact-us/","","domains","","","","","","","false","","false" -"CommonSecurityLog","vArmour Application Controller","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/vArmour%20Application%20Controller","varmournetworks","varmour_sentinel","2022-06-01","","","vArmour Networks","Partner","https://www.varmour.com/contact-us/","","domains","vArmourAC","vArmour","[Deprecated] vArmour Application Controller via Legacy Agent","vArmour reduces operational risk and increases cyber resiliency by visualizing and controlling application relationships across the enterprise. This vArmour connector enables streaming of Application Controller Violation Alerts into Microsoft Sentinel, so you can take advantage of search & correlation, alerting, & threat intelligence enrichment for each log.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Configure the vArmour Application Controller to forward Common Event Format (CEF) logs to the Syslog agent"", ""description"": ""Send Syslog messages in CEF format to the proxy machine. 
Make sure you to send the logs to port 514 TCP on the machine's IP address."", ""innerSteps"": [{""title"": ""2.1 Download the vArmour Application Controller user guide"", ""description"": ""Download the user guide from https://support.varmour.com/hc/en-us/articles/360057444831-vArmour-Application-Controller-6-0-User-Guide.""}, {""title"": ""2.2 Configure the Application Controller to Send Policy Violations"", ""description"": ""In the user guide - refer to \""Configuring Syslog for Monitoring and Violations\"" and follow steps 1 to 3.""}]}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/vArmour%20Application%20Controller/Data%20Connectors/Connector_vArmour_AppController_CEF.json","true" -"CommonSecurityLog","vArmour Application Controller","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/vArmour%20Application%20Controller","varmournetworks","varmour_sentinel","2022-06-01","","","vArmour Networks","Partner","https://www.varmour.com/contact-us/","","domains","vArmourACAma","vArmour","[Deprecated] vArmour Application Controller via AMA","vArmour reduces operational risk and increases cyber resiliency by visualizing and controlling application relationships across the enterprise. 
This vArmour connector enables streaming of Application Controller Violation Alerts into Microsoft Sentinel, so you can take advantage of search & correlation, alerting, & threat intelligence enrichment for each log.","[{""title"": """", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine""}, {""title"": ""Step B. Configure the vArmour Application Controller to forward Common Event Format (CEF) logs to the Syslog agent"", ""description"": ""Send Syslog messages in CEF format to the proxy machine. 
Make sure you to send the logs to port 514 TCP on the machine's IP address."", ""innerSteps"": [{""title"": ""1 Download the vArmour Application Controller user guide"", ""description"": ""Download the user guide from https://support.varmour.com/hc/en-us/articles/360057444831-vArmour-Application-Controller-6-0-User-Guide.""}, {""title"": ""2 Configure the Application Controller to Send Policy Violations"", ""description"": ""In the user guide - refer to \""Configuring Syslog for Monitoring and Violations\"" and follow steps 1 to 3.""}]}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","false","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/vArmour%20Application%20Controller/Data%20Connectors/template_vArmour_AppControllerAMA.json","true" +"Table","solution_name","solution_folder","solution_publisher_id","solution_offer_id","solution_first_publish_date","solution_last_publish_date","solution_version","solution_support_name","solution_support_tier","solution_support_link","solution_author_name","solution_categories","connector_id","connector_publisher","connector_title","connector_description","connector_instruction_steps","connector_permissions","connector_files","is_unique" +"OnePasswordEventLogs_CL","1Password","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/1Password","1password1617200969773","azure-sentinel-solution-1password","2023-12-01","","","1Password","Partner","https://support.1password.com/","","domains","1Password","1Password","1Password","The [1Password](https://www.1password.com) solution for Microsoft Sentinel enables you to ingest 1Password logs and events into Microsoft Sentinel. The connector provides visibility into 1Password Events and Alerts in Microsoft Sentinel to improve monitoring and investigation capabilities.

**Underlying Microsoft Technologies used:**

This solution takes a dependency on the following technologies, and some of these dependencies either may be in [Preview](https://azure.microsoft.com/support/legal/preview-supplemental-terms/) state or might result in additional ingestion or operational costs:

- [Azure Functions](https://azure.microsoft.com/services/functions/#overview)","[{""description"": "">**NOTE:** This connector uses Azure Functions to connect to 1Password to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**STEP 1 - Configuration steps for the 1Password API**\n\n [Follow these instructions](https://support.1password.com/events-reporting/#appendix-issue-or-revoke-bearer-tokens) provided by 1Password to obtain an API Token. **Note:** A 1Password account is required""}, {""description"": ""**STEP 2 - Deploy the functionApp using DeployToAzure button to create the table, dcr and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the 1Password connector, a custom table needs to be created.""}, {""description"": ""This method provides an automated deployment of the 1Password connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-OnePassword-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name**, **Workspace Name**, **API Key**, and **URI**.\n - The default **Time Interval** is set to pull the last five (5) minutes of data. 
If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion.\n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy."", ""title"": ""Option 1 - Azure Resource Manager (ARM) Template""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""1Password API Token"", ""description"": ""A 1Password API Token is required. [See the documentation to learn more about the 1Password API](https://developer.1password.com/docs/events-api/reference). 
**Note:** A 1Password account is required""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/1Password/Data%20Connectors/deployment/1Password_data_connector.json","true" +"OnePasswordEventLogs_CL","1Password","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/1Password","1password1617200969773","azure-sentinel-solution-1password","2023-12-01","","","1Password","Partner","https://support.1password.com/","","domains","1Password","1Password","1Password","The [1Password](https://www.1password.com) solution for Microsoft Sentinel enables you to ingest sign-in attempts, item usage, and audit events from your 1Password Business account using the [1Password Events Reporting API](https://developer.1password.com/docs/events-api). This allows you to monitor and investigate events in 1Password in Microsoft Sentinel along with the other applications and services your organization uses.

**Underlying Microsoft Technologies used:**

This solution depends on the following technologies, and some of which may be in [Preview](https://azure.microsoft.com/support/legal/preview-supplemental-terms/) state or may incur additional ingestion or operational costs:

- [Azure Functions](https://azure.microsoft.com/services/functions/#overview)","[{""description"": "">**NOTE:** This connector uses Azure Functions to connect to 1Password to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs from Azure. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**STEP 1 - Configuration steps for the 1Password Events Reporting API**\n\n [Follow these instructions](https://support.1password.com/events-reporting/#appendix-issue-or-revoke-bearer-tokens) provided by 1Password to obtain an Events Reporting API Token. **Note:** A 1Password Business account is required""}, {""description"": ""**STEP 2 - Deploy the functionApp using DeployToAzure button to create the table, dcr and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the 1Password connector, a custom table needs to be created.""}, {""description"": ""This method provides an automated deployment of the 1Password connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-OnePassword-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name**, **Workspace Name**, **1Password Events API Key**, and **URI**.\n - The default **Time Interval** is set to five (5) minutes. 
If you'd like to modify the interval, you can adjust the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion.\n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy."", ""title"": ""Option 1 - Azure Resource Manager (ARM) Template""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""1Password Events API Token"", ""description"": ""A 1Password Events API Token is required. [See the documentation to learn more about the 1Password API](https://developer.1password.com/docs/events-api/reference). 
\n\n**Note:** A 1Password Business account is required""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/1Password/Data%20Connectors/1Password_API_FunctionApp.json","true" +"OnePasswordEventLogs_CL","1Password","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/1Password","1password1617200969773","azure-sentinel-solution-1password","2023-12-01","","","1Password","Partner","https://support.1password.com/","","domains","1PasswordCCPDefinition","1Password","1Password (Serverless)","The 1Password CCP connector allows the user to ingest 1Password Audit, Signin & ItemUsage events into Microsoft Sentinel.","[{""title"": ""STEP 1 - Create a 1Password API token:"", ""description"": ""Follow the [1Password documentation](https://support.1password.com/events-reporting/#appendix-issue-or-revoke-bearer-tokens) for guidance on this step.""}, {""title"": ""STEP 2 - Choose the correct base URL:"", ""description"": ""There are multiple 1Password servers which might host your events. The correct server depends on your license and region. Follow the [1Password documentation](https://developer.1password.com/docs/events-api/reference/#servers) to choose the correct server. 
Input the base URL as displayed by the documentation (including 'https://' and without a trailing '/').""}, {""title"": ""STEP 3 - Enter your 1Password Details:"", ""description"": ""Enter the 1Password base URL & API Token below:"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Base Url"", ""placeholder"": ""Enter your Base Url"", ""type"": ""text"", ""name"": ""BaseUrl""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""Enter your API Token"", ""type"": ""password"", ""name"": ""ApiToken""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""1Password API token"", ""description"": ""A 1Password API Token is required. 
See the [1Password documentation](https://support.1password.com/events-reporting/#appendix-issue-or-revoke-bearer-tokens) on how to create an API token.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/1Password/Data%20Connectors/1Password_ccpv2/1Password_DataConnectorDefinition.json","true" +"apifirewall_log_1_CL","42Crunch API Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/42Crunch%20API%20Protection","42crunch1580391915541","42crunch_sentinel_solution","2022-09-21","","","42Crunch API Protection","Partner","https://42crunch.com/","","domains","42CrunchAPIProtection","42Crunch","API Protection","Connects the 42Crunch API protection to Azure Log Analytics via the REST API interface","[{""title"": ""Step 1 : Read the detailed documentation"", ""description"": ""The installation process is documented in great detail in the GitHub repository [Microsoft Sentinel integration](https://github.com/42Crunch/azure-sentinel-integration). The user should consult this repository further to understand installation and debug of the integration.""}, {""title"": ""Step 2: Retrieve the workspace access credentials"", ""description"": ""The first installation step is to retrieve both your **Workspace ID** and **Primary Key** from the Microsoft Sentinel platform.\nCopy the values shown below and save them for configuration of the API log forwarder integration."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Step 3: Install the 42Crunch protection and log forwarder"", ""description"": ""The next step is to install the 42Crunch protection and log forwarder to protect your API. Both components are availabe as containers from the [42Crunch repository](https://hub.docker.com/u/42crunch). 
The exact installation will depend on your environment, consult the [42Crunch protection documentation](https://docs.42crunch.com/latest/content/concepts/api_firewall_deployment_architecture.htm) for full details. Two common installation scenarios are described below:\n"", ""innerSteps"": [{""title"": ""Installation via Docker Compose"", ""description"": ""The solution can be installed using a [Docker compose file](https://github.com/42Crunch/azure-sentinel-integration/blob/main/sample-deployment/docker-compose.yml).""}, {""title"": ""Installation via Helm charts"", ""description"": ""The solution can be installed using a [Helm chart](https://github.com/42Crunch/azure-sentinel-integration/tree/main/helm/sentinel).""}]}, {""title"": ""Step 4: Test the data ingestion"", ""description"": ""In order to test the data ingestion the user should deploy the sample *httpbin* application alongside the 42Crunch protection and log forwarder [described in detail here](https://github.com/42Crunch/azure-sentinel-integration/tree/main/sample-deployment)."", ""innerSteps"": [{""title"": ""4.1 Install the sample"", ""description"": ""The sample application can be installed locally using a [Docker compose file](https://github.com/42Crunch/azure-sentinel-integration/blob/main/sample-deployment/docker-compose.yml) which will install the httpbin API server, the 42Crunch API protection and the Microsoft Sentinel log forwarder. Set the environment variables as required using the values copied from step 2.""}, {""title"": ""4.2 Run the sample"", ""description"": ""Verify the API protection is connected to the 42Crunch platform, and then exercise the API locally on the *localhost* at port 8080 using Postman, curl, or similar. You should see a mixture of passing and failing API calls. 
""}, {""title"": ""4.3 Verify the data ingestion on Log Analytics"", ""description"": ""After approximately 20 minutes access the Log Analytics workspace on your Microsoft Sentinel installation, and locate the *Custom Logs* section verify that a *apifirewall_log_1_CL* table exists. Use the sample queries to examine the data.""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/42Crunch%20API%20Protection/Data%20Connectors/42CrunchAPIProtection.json","true" +"CommonSecurityLog","AI Analyst Darktrace","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AI%20Analyst%20Darktrace","darktrace1655286944672","darktrace_mss","2022-05-02","","","Darktrace","Partner","https://www.darktrace.com/en/contact/","","domains","Darktrace","Darktrace","[Deprecated] AI Analyst Darktrace via Legacy Agent","The Darktrace connector lets users connect Darktrace Model Breaches in real-time with Microsoft Sentinel, allowing creation of custom Dashboards, Workbooks, Notebooks and Custom Alerts to improve investigation. Microsoft Sentinel's enhanced visibility into Darktrace logs enables monitoring and mitigation of security threats.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Configure Darktrace to forward Syslog messages in CEF format to your Azure workspace via the Syslog agent. \n\n 1) Within the Darktrace Threat Visualizer, navigate to the System Config page in the main menu under Admin. \n\n 2) From the left-hand menu, select Modules and choose Microsoft Sentinel from the available Workflow Integrations.\\n 3) A configuration window will open. 
Locate Microsoft Sentinel Syslog CEF and click New to reveal the configuration settings, unless already exposed. \n\n 4) In the Server configuration field, enter the location of the log forwarder and optionally modify the communication port. Ensure that the port selected is set to 514 and is allowed by any intermediary firewalls. \n\n 5) Configure any alert thresholds, time offsets or additional settings as required. \n\n 6) Review any additional configuration options you may wish to enable that alter the Syslog syntax.\n\n 7) Enable Send Alerts and save your changes.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AI%20Analyst%20Darktrace/Data%20Connectors/AIA-Darktrace.json","true" +"CommonSecurityLog","AI Analyst Darktrace","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AI%20Analyst%20Darktrace","darktrace1655286944672","darktrace_mss","2022-05-02","","","Darktrace","Partner","https://www.darktrace.com/en/contact/","","domains","DarktraceAma","Darktrace","[Deprecated] AI Analyst Darktrace via AMA","The Darktrace connector lets users connect Darktrace Model Breaches in real-time with Microsoft Sentinel, allowing creation of custom Dashboards, Workbooks, Notebooks and Custom Alerts to improve investigation. Microsoft Sentinel's enhanced visibility into Darktrace logs enables monitoring and mitigation of security threats.","[{""title"": """", ""description"": """", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. 
Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine""}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Configure Darktrace to forward Syslog messages in CEF format to your Azure workspace via the Syslog agent. \n\n 1) Within the Darktrace Threat Visualizer, navigate to the System Config page in the main menu under Admin. \n\n 2) From the left-hand menu, select Modules and choose Microsoft Sentinel from the available Workflow Integrations.\\n 3) A configuration window will open. Locate Microsoft Sentinel Syslog CEF and click New to reveal the configuration settings, unless already exposed. \n\n 4) In the Server configuration field, enter the location of the log forwarder and optionally modify the communication port. Ensure that the port selected is set to 514 and is allowed by any intermediary firewalls. \n\n 5) Configure any alert thresholds, time offsets or additional settings as required. \n\n 6) Review any additional configuration options you may wish to enable that alter the Syslog syntax.\n\n 7) Enable Send Alerts and save your changes.""}, {""title"": ""Step C. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AI%20Analyst%20Darktrace/Data%20Connectors/template_AIA-DarktraceAMA.json","true" +"AIShield_CL","AIShield AI Security Monitoring","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AIShield%20AI%20Security%20Monitoring","rbei","bgsw_aishield_sentinel","2022-01-11","2025-03-06","","AIShield","Partner","https://azuremarketplace.microsoft.com/marketplace/apps/rbei.bgsw_aishield_product/","","domains","BoschAIShield","Bosch","AIShield","[AIShield](https://www.boschaishield.com/) connector allows users to connect with AIShield custom defense mechanism logs with Microsoft Sentinel, allowing the creation of dynamic Dashboards, Workbooks, Notebooks and tailored Alerts to improve investigation and thwart attacks on AI systems. 
It gives users more insight into their organization's AI assets security posturing and improves their AI systems security operation capabilities.AIShield.GuArdIan analyzes the LLM generated content to identify and mitigate harmful content, safeguarding against legal, policy, role based, and usage based violations","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**AIShield**](https://aka.ms/sentinel-boschaishield-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": ""\n>**IMPORTANT:** Before deploying the AIShield Connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Note"", ""description"": ""Users should have utilized AIShield SaaS offering to conduct vulnerability analysis and deployed custom defense mechanisms generated along with their AI asset. 
[**Click here**](https://azuremarketplace.microsoft.com/marketplace/apps/rbei.bgsw_aishield_product) to know more or get in touch.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AIShield%20AI%20Security%20Monitoring/Data%20Connectors/AIShieldConnector.json","true" +"Event","ALC-WebCTRL","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ALC-WebCTRL","azuresentinel","azure-sentinel-solution-automated-logic-webctrl","2021-11-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AutomatedLogicWebCTRL","AutomatedLogic","Automated Logic WebCTRL ","You can stream the audit logs from the WebCTRL SQL server hosted on Windows machines connected to your Microsoft Sentinel. This connection enables you to view dashboards, create custom alerts and improve investigation. This gives insights into your Industrial Control Systems that are monitored or controlled by the WebCTRL BAS application.","[{""title"": ""1. Install and onboard the Microsoft agent for Windows."", ""description"": ""Learn about [agent setup](https://docs.microsoft.com/services-hub/health/mma-setup) and [windows events onboarding](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-windows-events). \n\n You can skip this step if you have already installed the Microsoft agent for Windows""}, {""title"": ""2. Configure Windows task to read the audit data and write it to windows events"", ""description"": ""Install and configure the Windows Scheduled Task to read the audit logs in SQL and write them as Windows Events. 
These Windows Events will be collected by the agent and forward to Microsoft Sentinel.\n\n> Notice that the data from all machines will be stored in the selected workspace"", ""innerSteps"": [{""title"": """", ""description"": ""2.1 Copy the [setup files](https://aka.ms/sentinel-automatedlogicwebctrl-tasksetup) to a location on the server.""}, {""title"": """", ""description"": ""2.2 Update the [ALC-WebCTRL-AuditPull.ps1](https://aka.ms/sentinel-automatedlogicwebctrl-auditpull) (copied in above step) script parameters like the target database name and windows event id's. Refer comments in the script for more details.""}, {""title"": """", ""description"": ""2.3 Update the windows task settings in the [ALC-WebCTRL-AuditPullTaskConfig.xml](https://aka.ms/sentinel-automatedlogicwebctrl-auditpulltaskconfig) file that was copied in above step as per requirement. Refer comments in the file for more details.""}, {""title"": """", ""description"": ""2.4 Install windows tasks using the updated configs copied in the above steps"", ""instructions"": [{""parameters"": {""label"": ""Run the following command in powershell from the directory where the setup files are copied in step 2.1"", ""value"": ""schtasks.exe /create /XML \""ALC-WebCTRL-AuditPullTaskConfig.xml\"" /tn \""ALC-WebCTRL-AuditPull\""""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the Event schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, validate below steps for any run time issues:\n\n> 1. Make sure that the scheduled task is created and is in running state in the Windows Task Scheduler.\n\n>2. Check for task execution errors in the history tab in Windows Task Scheduler for the newly created task in step 2.4\n\n>3. 
Make sure that the SQL Audit table consists new records while the scheduled windows task runs.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ALC-WebCTRL/Data%20Connectors/Connector_WindowsEvents_WebCTRL.json","true" +"ARGOS_CL","ARGOSCloudSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ARGOSCloudSecurity","argoscloudsecurity1605618416175","argos-sentinel","2022-08-16","","","ARGOS Cloud Security","Partner","https://argos-security.io/contact-us","","domains","ARGOSCloudSecurity","ARGOS Cloud Security","ARGOS Cloud Security","The ARGOS Cloud Security integration for Microsoft Sentinel allows you to have all your important cloud security events in one place. This enables you to easily create dashboards, alerts, and correlate events across multiple systems. Overall this will improve your organization's security posture and security incident response.","[{""title"": ""1. Subscribe to ARGOS"", ""description"": ""Ensure you already own an ARGOS Subscription. 
If not, browse to [ARGOS Cloud Security](https://argos-security.io) and sign up to ARGOS.\n\nAlternatively, you can also purchase ARGOS via the [Azure Marketplace](https://azuremarketplace.microsoft.com/en-au/marketplace/apps/argoscloudsecurity1605618416175.argoscloudsecurity?tab=Overview).""}, {""title"": ""2. Configure Sentinel integration from ARGOS"", ""description"": ""Configure ARGOS to forward any new detections to your Sentinel workspace by providing ARGOS with your Workspace ID and Primary Key.\n\nThere is **no need to deploy any custom infrastructure**.\n\nEnter the information into the [ARGOS Sentinel](https://app.argos-security.io/account/sentinel) configuration page.\n\nNew detections will automatically be forwarded.\n\n[Learn more about the integration](https://www.argos-security.io/resources#integrations)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ARGOSCloudSecurity/Data%20Connectors/Connector_ARGOS.json","true" +"AWSCloudFront_AccessLog_CL","AWS CloudFront","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS%20CloudFront","azuresentinel","azure-sentinel-solution-aws-cloudfront","2025-03-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsCloudfrontCcpDefinition","Microsoft","Amazon Web Services CloudFront (via Codeless Connector Framework) (Preview)","This data connector enables the integration of AWS CloudFront logs with Microsoft Sentinel to support advanced threat detection, investigation, and security monitoring. By utilizing Amazon S3 for log storage and Amazon SQS for message queuing, the connector reliably ingests CloudFront access logs into Microsoft Sentinel","[{""title"": ""Ingesting AWS CloudFront logs in Microsoft Sentinel"", ""description"": ""### List of Resources Required:\n\n* Open ID Connect (OIDC) web identity provider\n* IAM Role\n* Amazon S3 Bucket\n* Amazon SQS\n* AWS CloudFront configuration\n\n"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. AWS CloudFormation Deployment \n To configure access on AWS, two templates has been generated to set up the AWS environment to send logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. 
Choose the \u2018**Specify template**\u2019 option, then \u2018**Upload a template file**\u2019 by clicking on \u2018**Choose file**\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018**Choose file**\u2019 and select the downloaded template. \n 3. Click '**Next**' and '**Create stack**'.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWSCloudFront resources deployment"", ""isMultiLine"": true, ""fillWith"": [""AWSCloudFront""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", 
""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS%20CloudFront/Data%20Connectors/AWSCloudFrontLog_CCF/AWSCloudFrontLog_ConnectorDefinition.json","true" +"AWSSecurityHubFindings","AWS Security Hub","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS%20Security%20Hub","azuresentinel","azure-sentinel-solution-awssecurityhub","2025-03-12","2025-03-12","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsSecurityHubFindingsCcpDefinition","Microsoft","AWS Security Hub Findings (via Codeless Connector Framework)","This connector enables the ingestion of AWS Security Hub Findings, which are collected in AWS S3 buckets, into Microsoft Sentinel. It helps streamline the process of monitoring and managing security alerts by integrating AWS Security Hub Findings with Microsoft Sentinel's advanced threat detection and response capabilities.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""### 1. AWS CloudFormation Deployment \n Use the provided CloudFormation templates to configure the AWS environment for sending logs from AWS Security Hub to your Log Analytics Workspace.\n""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Deploy CloudFormation Templates in AWS: \n1. Navigate to the [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create).\n2. Click **Create stack** and select **With new resources**.\n3. Choose **Upload a template file**, then click **Choose file** to upload the appropriate CloudFormation template provided.\n4. Follow the prompts and click **Next** to complete the stack creation.\n5. 
After the stacks are created, note down the **Role ARN** and **SQS Queue URL**.\n""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID Connect authentication provider deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS Security Hub resources deployment"", ""isMultiLine"": true, ""fillWith"": [""AwsSecurityHub""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""### 2. Connect new collectors \n To enable AWS Security Hub Connector for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS Security Hub connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions 
to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": false, ""write"": false, ""delete"": false, ""action"": true}}], ""customs"": [{""name"": ""Environment"", ""description"": ""You must have the following AWS resources defined and configured: AWS Security Hub, Amazon Data Firehose, Amazon EventBridge, S3 Bucket, Simple Queue Service (SQS), IAM roles and permissions policies.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS%20Security%20Hub/Data%20Connectors/AWSSecurityHubFindings_CCP/AWSSecurityHubFindings_DataConnectorDefinition.json","true" +"","AWS Systems Manager","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS%20Systems%20Manager","azuresentinel","azure-sentinel-solution-awssystemsmanager","","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"AWSVPCFlow","AWS VPC Flow Logs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS%20VPC%20Flow%20Logs","azuresentinel","azure-sentinel-solution-awsvpcflowlogs","2025-07-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AWSS3VPCFlowLogsParquetDefinition","Microsoft","Amazon Web Services S3 VPC Flow Logs","This connector allows you to ingest AWS VPC Flow Logs, collected in AWS S3 buckets, to Microsoft Sentinel. AWS VPC Flow Logs provide visibility into network traffic within your AWS Virtual Private Cloud (VPC), enabling security analysis and network monitoring.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. 
AWS CloudFormation Deployment \n To configure access on AWS, two templates have been generated to set up the AWS environment to send VPC Flow Logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create a Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018Specify template\u2019 option, then \u2018Upload a template file\u2019 by clicking on \u2018Choose file\u2019 and selecting the appropriate CloudFormation template file provided below. Click \u2018Choose file\u2019 and select the downloaded template. \n 3. Click 'Next' and 'Create stack'.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS VPC Flow Logs resources deployment"", ""isMultiLine"": true, ""fillWith"": [""AwsVPCFlow""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the 'Add new collector' button, fill in the required information and click on 'Connect'""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.fileFormat"", ""columnName"": ""File Format""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS VPC Flow Logs connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""fileFormat"", ""required"": true, ""placeholder"": ""Select a file format"", ""options"": [{""key"": ""Json"", ""text"": ""JSON Format""}, {""key"": ""Parquet"", ""text"": ""Parquet Format""}, {""key"": ""Csv"", ""text"": ""CSV Format""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS%20VPC%20Flow%20Logs/Data%20Connectors/AWSVPCFlowLogs_CCP/AWSVPCFlowLogs_DataConnectorDefinition.json","true" 
+"","AWSAthena","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWSAthena","azuresentinel","azure-sentinel-solution-awsathena","2022-11-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"AWSS3ServerAccess","AWS_AccessLogs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS_AccessLogs","azuresentinel","azure-sentinel-solution-awsaccesslogs","2025-02-06","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsS3ServerAccessLogsDefinition","Microsoft","AWS S3 Server Access Logs (via Codeless Connector Framework)","This connector allows you to ingest AWS S3 Server Access Logs into Microsoft Sentinel. These logs contain detailed records for requests made to S3 buckets, including the type of request, resource accessed, requester information, and response details. These logs are useful for analyzing access patterns, debugging issues, and ensuring security compliance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""### 1. AWS CloudFormation Deployment \n To configure access on AWS, two templates has been generated to set up the AWS environment to send logs from an AWS S3 Server Access logs to your Log Analytics Workspace.\n""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Deploy CloudFormation Templates in AWS: \n1. Navigate to the [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create).\n2. Click **Create stack** and select **With new resources**.\n3. Choose **Upload a template file**, then click **Choose file** to upload the appropriate CloudFormation template provided.\n4. Follow the prompts and click **Next** to complete the stack creation.\n5. 
After the stacks are created, note down the **Role ARN** and **SQS Queue URL**.\n""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID Connect authentication provider deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS Server Access resources deployment"", ""isMultiLine"": true, ""fillWith"": [""AWSS3ServerAccess""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""### 2. Connect new collectors \n To enable AWS S3 Server Access Logs Connector for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new collector"", ""subtitle"": ""AWS Server Access Logs connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""providerDisplayName"": ""Workspace"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": 
""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": false, ""write"": false, ""delete"": false, ""action"": true}}], ""customs"": [{""name"": ""Environment"", ""description"": ""You must have the following AWS resources defined and configured: S3 Bucket, Simple Queue Service (SQS), IAM roles and permissions policies.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS_AccessLogs/Data%20Connectors/AwsS3ServerAccessLogsDefinition_CCP/AWSS3ServerAccessLogs_ConnectorDefinition.json","true" +"","AWS_IAM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AWS_IAM","azuresentinel","azure-sentinel-solution-amazonwebservicesiam","2022-09-28","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"ABNORMAL_CASES_CL","AbnormalSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AbnormalSecurity","abnormalsecuritycorporation1593011233180","fe1b4806-215b-4610-bf95-965a7a65579c","2021-10-20","","","Abnormal Security","Partner","https://abnormalsecurity.com/contact","","domains","AbnormalSecurity","AbnormalSecurity","AbnormalSecurity ","The Abnormal Security data connector provides the capability to ingest threat and case logs into Microsoft Sentinel using the [Abnormal Security Rest API.](https://app.swaggerhub.com/apis/abnormal-security/abx/)","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Abnormal Security's REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Abnormal Security API**\n\n [Follow these instructions](https://app.swaggerhub.com/apis/abnormal-security/abx) provided by Abnormal Security to configure the REST API integration. **Note:** An Abnormal Security account is required""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Abnormal Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Abnormal Security API Authorization Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""This method provides an automated deployment of the Abnormal Security connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-abnormalsecurity-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Microsoft Sentinel Workspace ID**, **Microsoft Sentinel Shared Key** and **Abnormal Security REST API Key**.\n - The default **Time Interval** is set to pull the last five (5) minutes of data. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion.\n 4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Abnormal Security data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-abnormalsecurity-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. AbnormalSecurityXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tSENTINEL_WORKSPACE_ID\n\t\tSENTINEL_SHARED_KEY\n\t\tABNORMAL_SECURITY_REST_API_TOKEN\n\t\tlogAnalyticsUri (optional)\n(add any other settings required by the Function App)\nSet the `uri` value to: `` \n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Azure Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us.` \n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Abnormal Security API Token"", ""description"": ""An Abnormal Security API Token is required. [See the documentation to learn more about Abnormal Security API](https://app.swaggerhub.com/apis/abnormal-security/abx/). **Note:** An Abnormal Security account is required""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AbnormalSecurity/Data%20Connectors/AbnormalSecurity_API_FunctionApp.json","true" +"ABNORMAL_THREAT_MESSAGES_CL","AbnormalSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AbnormalSecurity","abnormalsecuritycorporation1593011233180","fe1b4806-215b-4610-bf95-965a7a65579c","2021-10-20","","","Abnormal Security","Partner","https://abnormalsecurity.com/contact","","domains","AbnormalSecurity","AbnormalSecurity","AbnormalSecurity ","The Abnormal Security data connector provides the capability to ingest threat and case logs into Microsoft Sentinel using the [Abnormal Security Rest API.](https://app.swaggerhub.com/apis/abnormal-security/abx/)","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Abnormal Security's REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Abnormal Security API**\n\n [Follow these instructions](https://app.swaggerhub.com/apis/abnormal-security/abx) provided by Abnormal Security to configure the REST API integration. **Note:** An Abnormal Security account is required""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Abnormal Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Abnormal Security API Authorization Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""This method provides an automated deployment of the Abnormal Security connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-abnormalsecurity-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Microsoft Sentinel Workspace ID**, **Microsoft Sentinel Shared Key** and **Abnormal Security REST API Key**.\n - The default **Time Interval** is set to pull the last five (5) minutes of data. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion.\n 4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Abnormal Security data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-abnormalsecurity-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. AbnormalSecurityXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tSENTINEL_WORKSPACE_ID\n\t\tSENTINEL_SHARED_KEY\n\t\tABNORMAL_SECURITY_REST_API_TOKEN\n\t\tlogAnalyticsUri (optional)\n(add any other settings required by the Function App)\nSet the `uri` value to: `` \n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Azure Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us.` \n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Abnormal Security API Token"", ""description"": ""An Abnormal Security API Token is required. [See the documentation to learn more about Abnormal Security API](https://app.swaggerhub.com/apis/abnormal-security/abx/). **Note:** An Abnormal Security account is required""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AbnormalSecurity/Data%20Connectors/AbnormalSecurity_API_FunctionApp.json","true" +"","AbuseIPDB","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AbuseIPDB","azuresentinel","azure-sentinel-solution-abuseipdb","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"","Acronis Cyber Protect Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Acronis%20Cyber%20Protect%20Cloud","acronisinternationalgmbh","azure-sentinel-solution-acronis-cyber-protect","2025-10-28","2025-10-28","","Acronis International GmbH","Partner","https://www.acronis.com/en/support","","domains,verticals","","","","","","","","false" +"agari_apdpolicy_log_CL","Agari","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Agari","agari","Agari_MSS","2022-05-02","","","Agari","Partner","https://support.agari.com/hc/en-us/articles/360000645632-How-to-access-Agari-Support","","domains","Agari","Agari","Agari Phishing Defense and Brand Protection","This connector uses a Agari REST API connection to push 
data into Azure Sentinel Log Analytics.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Agari APIs to pull its logs into Azure Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""STEP 1 - Get your Agari API credentials"", ""description"": ""\n1. Log into any Agari product (Client ID and Secret are the same for all applications) \n2. Click on your username in the upper right and select **Settings**\n3. Click on the **Generate API Secret** link to generate an API client_id and client_secret (the link will read **Regenerate API Secret** if you have already generated an API client ID/secret previously)\n4. Copy both the client_id and client_secret that are generated""}, {""title"": ""STEP 2 - (Optional) Enable the Security Graph API"", ""description"": ""Follow the instrcutions found on article [Connect Azure Sentinel to your threat intelligence platform](https://docs.microsoft.com/azure/sentinel/connect-threat-intelligence#connect-azure-sentinel-to-your-threat-intelligence-platform). 
Once the application is created you will need to record the Tenant ID, Client ID and Client Secret.""}, {""title"": ""STEP 3 - Deploy the connector and the associated Azure Function"", ""description"": ""\n>**IMPORTANT:** Before deploying the Agari Connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Agari API credentials from the previous step."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Choose a deployement option"", ""description"": """"}, {""title"": ""Option 1: Deploy using the Azure Resource Manager (ARM) Template"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-agari-azuredeploy) \n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **Agari Client ID**, **Agari Client Secret**, select `True` or `False` for the products you subscribe to, and if you wish to share IoCs with Sentinel, select `True` For **Enable Security Graph Sharing**, and enter the required IDs from the Azure Application.\n> - The Function App will request data from the Agari APIs every 5 minutes, corresponding to the Funciton App Timer.\n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n6. 
**NOTE:** Due to the use of Environment Variables to store log access times, the App requires 1 additonal manual step. In the Function App, select the Function App Name and select Click on **Identity** and for System assigned Identity, click on **Azure role assignments** and **Add Role assignment**. Select **Subscription** as the scope, select your subscription and set the Role to **Contributor**. Click on **Save**.""}, {""title"": ""Option 2: Manual Deployment of Azure Functions"", ""description"": ""**1. Create a Function App**\n\n1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp), and select **+ Add**.\n2. In the **Basics** tab, ensure Runtime stack is set to **Powershell Core**. \n3. In the **Hosting** tab, ensure the **Consumption (Serverless)** plan type is selected.\n4. Make other preferrable configuration changes, if needed, then click **Create**.""}, {""title"": """", ""description"": ""**2. Import Function App Code**\n\n1. In the newly created Function App, select **Functions** on the left pane and click **+ Add**.\n2. Click on **Code + Test** on the left pane. \n3. Copy the [Function App Code](https://aka.ms/sentinel-agari-functionapp) and paste into the Function App `run.ps1` editor.\n3. Click **Save**.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following eight to twelve (8-12) application settings individually, with their respective string values (case-sensitive): \n\t\tclientID\n\t\tclientSecret\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\tenableBrandProtectionAPI\n\t\tenablePhishingResponseAPI\n\t\tenablePhishingDefenseAPI\n\t\tresGroup\n\t\tfunctionName\n\t\tsubId\n\t\tenableSecurityGraphSharing\n\t\t<--- Required if enableSecurityGraphSharing is set to true --->\n\t\tGraphTenantId\n\t\tGraphClientId\n\t\tGraphClientSecret\n\t\tlogAnalyticsUri (optional)\n> - Enter your Agari ClientID and Secret in 'clientId' and 'clientSecret'\n> - Enter 'true' or 'false' for 'enablePhishingDefense', 'enableBrandProtection', 'enablePhishingResponse' as per your product subscriptions.\n> - Enter your Resource Group name in resGroup, the name of the Function (from previous step) in functionName and your Subscription ID in subId.\n> - Enter 'true' or 'false' for 'enableSecurtyGraphAPI'. If you are enabling the Security Graph, the 'GraphTenantId','GraphClientId', and 'GraphClientSecret' is required.\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n""}, {""title"": """", ""description"": ""**4. Set Permissions for the App**\n\n1. In the Function App, select the Function App Name and select Click on **Identity** and for System assigned Identity, set the status to On. \n\n2. Next, click on **Azure role assignments** and **Add Role assignment**. Select **Subscription** as the scope, select your subscription and set the Role to **Contributor**. Click on **Save**.""}, {""title"": """", ""description"": ""**5. Complete Setup.**\n\n1. Once all application settings have been entered, click **Save**. 
Note that it will take some time to have the required dependencies download, so you may see some inital failure messages.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Agari Phishing Defense, Phishing Response or Brand Protection API Client ID and Secret"", ""description"": ""Ensure you have your Client ID and Secret keys. Instructions can be found on the [Agari Developers Site](https://developers.agari.com/agari-platform/docs/quick-start).""}, {""name"": ""(Optional) Microsoft Security Graph API"", ""description"": ""The Agari Function App has the ability to share threat intelleigence with Sentinel via the Security Graph API. To use this feature, you will need to enable the [Sentinel Threat Intelligence Platforms connector](https://docs.microsoft.com/azure/sentinel/connect-threat-intelligence) as well as register an application in Azure Active Directory. 
""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Agari/Data%20Connectors/Agari_API_FunctionApp.json","true" +"agari_apdtc_log_CL","Agari","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Agari","agari","Agari_MSS","2022-05-02","","","Agari","Partner","https://support.agari.com/hc/en-us/articles/360000645632-How-to-access-Agari-Support","","domains","Agari","Agari","Agari Phishing Defense and Brand Protection","This connector uses a Agari REST API connection to push data into Azure Sentinel Log Analytics.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Agari APIs to pull its logs into Azure Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""STEP 1 - Get your Agari API credentials"", ""description"": ""\n1. Log into any Agari product (Client ID and Secret are the same for all applications) \n2. Click on your username in the upper right and select **Settings**\n3. Click on the **Generate API Secret** link to generate an API client_id and client_secret (the link will read **Regenerate API Secret** if you have already generated an API client ID/secret previously)\n4. 
Copy both the client_id and client_secret that are generated""}, {""title"": ""STEP 2 - (Optional) Enable the Security Graph API"", ""description"": ""Follow the instrcutions found on article [Connect Azure Sentinel to your threat intelligence platform](https://docs.microsoft.com/azure/sentinel/connect-threat-intelligence#connect-azure-sentinel-to-your-threat-intelligence-platform). Once the application is created you will need to record the Tenant ID, Client ID and Client Secret.""}, {""title"": ""STEP 3 - Deploy the connector and the associated Azure Function"", ""description"": ""\n>**IMPORTANT:** Before deploying the Agari Connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Agari API credentials from the previous step."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Choose a deployement option"", ""description"": """"}, {""title"": ""Option 1: Deploy using the Azure Resource Manager (ARM) Template"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-agari-azuredeploy) \n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the **Workspace ID**, **Workspace Key**, **Agari Client ID**, **Agari Client Secret**, select `True` or `False` for the products you subscribe to, and if you wish to share IoCs with Sentinel, select `True` For **Enable Security Graph Sharing**, and enter the required IDs from the Azure Application.\n> - The Function App will request data from the Agari APIs every 5 minutes, corresponding to the Funciton App Timer.\n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n6. **NOTE:** Due to the use of Environment Variables to store log access times, the App requires 1 additonal manual step. In the Function App, select the Function App Name and select Click on **Identity** and for System assigned Identity, click on **Azure role assignments** and **Add Role assignment**. Select **Subscription** as the scope, select your subscription and set the Role to **Contributor**. Click on **Save**.""}, {""title"": ""Option 2: Manual Deployment of Azure Functions"", ""description"": ""**1. Create a Function App**\n\n1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp), and select **+ Add**.\n2. In the **Basics** tab, ensure Runtime stack is set to **Powershell Core**. \n3. In the **Hosting** tab, ensure the **Consumption (Serverless)** plan type is selected.\n4. Make other preferrable configuration changes, if needed, then click **Create**.""}, {""title"": """", ""description"": ""**2. Import Function App Code**\n\n1. 
In the newly created Function App, select **Functions** on the left pane and click **+ Add**.\n2. Click on **Code + Test** on the left pane. \n3. Copy the [Function App Code](https://aka.ms/sentinel-agari-functionapp) and paste into the Function App `run.ps1` editor.\n3. Click **Save**.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following eight to twelve (8-12) application settings individually, with their respective string values (case-sensitive): \n\t\tclientID\n\t\tclientSecret\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\tenableBrandProtectionAPI\n\t\tenablePhishingResponseAPI\n\t\tenablePhishingDefenseAPI\n\t\tresGroup\n\t\tfunctionName\n\t\tsubId\n\t\tenableSecurityGraphSharing\n\t\t<--- Required if enableSecurityGraphSharing is set to true --->\n\t\tGraphTenantId\n\t\tGraphClientId\n\t\tGraphClientSecret\n\t\tlogAnalyticsUri (optional)\n> - Enter your Agari ClientID and Secret in 'clientId' and 'clientSecret'\n> - Enter 'true' or 'false' for 'enablePhishingDefense', 'enableBrandProtection', 'enablePhishingResponse' as per your product subscriptions.\n> - Enter your Resource Group name in resGroup, the name of the Function (from previous step) in functionName and your Subscription ID in subId.\n> - Enter 'true' or 'false' for 'enableSecurtyGraphAPI'. If you are enabling the Security Graph, the 'GraphTenantId','GraphClientId', and 'GraphClientSecret' is required.\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n""}, {""title"": """", ""description"": ""**4. Set Permissions for the App**\n\n1. 
In the Function App, select the Function App Name and select Click on **Identity** and for System assigned Identity, set the status to On. \n\n2. Next, click on **Azure role assignments** and **Add Role assignment**. Select **Subscription** as the scope, select your subscription and set the Role to **Contributor**. Click on **Save**.""}, {""title"": """", ""description"": ""**5. Complete Setup.**\n\n1. Once all application settings have been entered, click **Save**. Note that it will take some time to have the required dependencies download, so you may see some inital failure messages.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Agari Phishing Defense, Phishing Response or Brand Protection API Client ID and Secret"", ""description"": ""Ensure you have your Client ID and Secret keys. 
Instructions can be found on the [Agari Developers Site](https://developers.agari.com/agari-platform/docs/quick-start).""}, {""name"": ""(Optional) Microsoft Security Graph API"", ""description"": ""The Agari Function App has the ability to share threat intelleigence with Sentinel via the Security Graph API. To use this feature, you will need to enable the [Sentinel Threat Intelligence Platforms connector](https://docs.microsoft.com/azure/sentinel/connect-threat-intelligence) as well as register an application in Azure Active Directory. ""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Agari/Data%20Connectors/Agari_API_FunctionApp.json","true" +"agari_bpalerts_log_CL","Agari","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Agari","agari","Agari_MSS","2022-05-02","","","Agari","Partner","https://support.agari.com/hc/en-us/articles/360000645632-How-to-access-Agari-Support","","domains","Agari","Agari","Agari Phishing Defense and Brand Protection","This connector uses a Agari REST API connection to push data into Azure Sentinel Log Analytics.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Agari APIs to pull its logs into Azure Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""STEP 1 - Get your Agari API credentials"", ""description"": ""\n1. Log into any Agari product (Client ID and Secret are the same for all applications) \n2. 
Click on your username in the upper right and select **Settings**\n3. Click on the **Generate API Secret** link to generate an API client_id and client_secret (the link will read **Regenerate API Secret** if you have already generated an API client ID/secret previously)\n4. Copy both the client_id and client_secret that are generated""}, {""title"": ""STEP 2 - (Optional) Enable the Security Graph API"", ""description"": ""Follow the instrcutions found on article [Connect Azure Sentinel to your threat intelligence platform](https://docs.microsoft.com/azure/sentinel/connect-threat-intelligence#connect-azure-sentinel-to-your-threat-intelligence-platform). Once the application is created you will need to record the Tenant ID, Client ID and Client Secret.""}, {""title"": ""STEP 3 - Deploy the connector and the associated Azure Function"", ""description"": ""\n>**IMPORTANT:** Before deploying the Agari Connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Agari API credentials from the previous step."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Choose a deployement option"", ""description"": """"}, {""title"": ""Option 1: Deploy using the Azure Resource Manager (ARM) Template"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-agari-azuredeploy) \n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the **Workspace ID**, **Workspace Key**, **Agari Client ID**, **Agari Client Secret**, select `True` or `False` for the products you subscribe to, and if you wish to share IoCs with Sentinel, select `True` For **Enable Security Graph Sharing**, and enter the required IDs from the Azure Application.\n> - The Function App will request data from the Agari APIs every 5 minutes, corresponding to the Funciton App Timer.\n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n6. **NOTE:** Due to the use of Environment Variables to store log access times, the App requires 1 additonal manual step. In the Function App, select the Function App Name and select Click on **Identity** and for System assigned Identity, click on **Azure role assignments** and **Add Role assignment**. Select **Subscription** as the scope, select your subscription and set the Role to **Contributor**. Click on **Save**.""}, {""title"": ""Option 2: Manual Deployment of Azure Functions"", ""description"": ""**1. Create a Function App**\n\n1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp), and select **+ Add**.\n2. In the **Basics** tab, ensure Runtime stack is set to **Powershell Core**. \n3. In the **Hosting** tab, ensure the **Consumption (Serverless)** plan type is selected.\n4. Make other preferrable configuration changes, if needed, then click **Create**.""}, {""title"": """", ""description"": ""**2. Import Function App Code**\n\n1. 
In the newly created Function App, select **Functions** on the left pane and click **+ Add**.\n2. Click on **Code + Test** on the left pane. \n3. Copy the [Function App Code](https://aka.ms/sentinel-agari-functionapp) and paste into the Function App `run.ps1` editor.\n3. Click **Save**.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following eight to twelve (8-12) application settings individually, with their respective string values (case-sensitive): \n\t\tclientID\n\t\tclientSecret\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\tenableBrandProtectionAPI\n\t\tenablePhishingResponseAPI\n\t\tenablePhishingDefenseAPI\n\t\tresGroup\n\t\tfunctionName\n\t\tsubId\n\t\tenableSecurityGraphSharing\n\t\t<--- Required if enableSecurityGraphSharing is set to true --->\n\t\tGraphTenantId\n\t\tGraphClientId\n\t\tGraphClientSecret\n\t\tlogAnalyticsUri (optional)\n> - Enter your Agari ClientID and Secret in 'clientId' and 'clientSecret'\n> - Enter 'true' or 'false' for 'enablePhishingDefense', 'enableBrandProtection', 'enablePhishingResponse' as per your product subscriptions.\n> - Enter your Resource Group name in resGroup, the name of the Function (from previous step) in functionName and your Subscription ID in subId.\n> - Enter 'true' or 'false' for 'enableSecurtyGraphAPI'. If you are enabling the Security Graph, the 'GraphTenantId','GraphClientId', and 'GraphClientSecret' is required.\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n""}, {""title"": """", ""description"": ""**4. Set Permissions for the App**\n\n1. 
In the Function App, select the Function App Name and select Click on **Identity** and for System assigned Identity, set the status to On. \n\n2. Next, click on **Azure role assignments** and **Add Role assignment**. Select **Subscription** as the scope, select your subscription and set the Role to **Contributor**. Click on **Save**.""}, {""title"": """", ""description"": ""**5. Complete Setup.**\n\n1. Once all application settings have been entered, click **Save**. Note that it will take some time to have the required dependencies download, so you may see some inital failure messages.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Agari Phishing Defense, Phishing Response or Brand Protection API Client ID and Secret"", ""description"": ""Ensure you have your Client ID and Secret keys. 
Instructions can be found on the [Agari Developers Site](https://developers.agari.com/agari-platform/docs/quick-start).""}, {""name"": ""(Optional) Microsoft Security Graph API"", ""description"": ""The Agari Function App has the ability to share threat intelleigence with Sentinel via the Security Graph API. To use this feature, you will need to enable the [Sentinel Threat Intelligence Platforms connector](https://docs.microsoft.com/azure/sentinel/connect-threat-intelligence) as well as register an application in Azure Active Directory. ""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Agari/Data%20Connectors/Agari_API_FunctionApp.json","true" +"InfoSecAnalytics_CL","AgileSec Analytics Connector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AgileSec%20Analytics%20Connector","infosecglobal1632846037582","agilesec-analytics-connector","","","","InfoSecGlobal","Partner","https://www.infosecglobal.com/","","domains","InfoSecDataConnector","InfoSecGlobal","InfoSecGlobal Data Connector","Use this data connector to integrate with InfoSec Crypto Analytics and get data sent directly to Microsoft Sentinel.","[{""title"": ""InfoSecGlobal Crypto Analytics Data Connector"", ""description"": ""1. Data is sent to Microsoft Sentinel through Logstash\n 2. Required Logstash configuration is included with Crypto Analytics installation\n 3. 
Documentation provided with the Crypto Analytics installation explains how to enable sending data to Microsoft Sentinel\n"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AgileSec%20Analytics%20Connector/Data%20Connectors/Connector_Analytics_InfoSec.json","true" +"CommonSecurityLog","Akamai Security Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Akamai%20Security%20Events","azuresentinel","azure-sentinel-solution-akamai","2022-03-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AkamaiSecurityEvents","Akamai","[Deprecated] Akamai Security Events via Legacy Agent","Akamai Solution for Microsoft Sentinel provides the capability to ingest [Akamai Security Events](https://www.akamai.com/us/en/products/security/) into Microsoft Sentinel. 
Refer to [Akamai SIEM Integration documentation](https://developer.akamai.com/tools/integrations/siem) for more information.","[{""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Akamai Security Events and load the function code or click [here](https://aka.ms/sentinel-akamaisecurityevents-parser), on the second line of the query, enter the hostname(s) of your Akamai Security Events device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. 
You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""[Follow these steps](https://developer.akamai.com/tools/integrations/siem) to configure Akamai CEF connector to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Akamai%20Security%20Events/Data%20Connectors/Connector_CEF_Akamai.json","true" +"CommonSecurityLog","Akamai Security Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Akamai%20Security%20Events","azuresentinel","azure-sentinel-solution-akamai","2022-03-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AkamaiSecurityEventsAma","Akamai","[Deprecated] Akamai Security Events via AMA","Akamai Solution for Microsoft Sentinel provides the capability to ingest [Akamai Security Events](https://www.akamai.com/us/en/products/security/) into Microsoft Sentinel. Refer to [Akamai SIEM Integration documentation](https://developer.akamai.com/tools/integrations/siem) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Akamai Security Events and load the function code or click [here](https://aka.ms/sentinel-akamaisecurityevents-parser), on the second line of the query, enter the hostname(s) of your Akamai Security Events device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""[Follow these steps](https://developer.akamai.com/tools/integrations/siem) to configure Akamai CEF connector to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address."", ""instructions"": []}, {""title"": ""Step C. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Akamai%20Security%20Events/Data%20Connectors/template_AkamaiSecurityEventsAMA.json","true" +"","Alibaba Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Alibaba%20Cloud","azuresentinel","azure-sentinel-solution-alibabacloud","2022-06-27","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"AliCloudActionTrailLogs_CL","Alibaba Cloud ActionTrail","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Alibaba%20Cloud%20ActionTrail","azuresentinel","azure-sentinel-solution-alibabacloud-actiontrail","2025-07-03","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AliCloudActionTrailCCPDefinition","Microsoft","Alibaba Cloud ActionTrail (via Codeless Connector Framework)","The [Alibaba Cloud ActionTrail](https://www.alibabacloud.com/product/actiontrail) data connector provides the capability to retrieve actiontrail events stored into [Alibaba Cloud Simple Log Service](https://www.alibabacloud.com/product/log-service) and store them into Microsoft Sentinel through the [SLS REST API](https://www.alibabacloud.com/help/sls/developer-reference/api-sls-2020-12-30-getlogs). 
The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": ""Configure access to AliCloud SLS API"", ""description"": ""Before using the API, you need to prepare your identity account and access key pair to effectively access the API.\n1. We recommend that you use a Resource Access Management (RAM) user to call API operations. For more information, see [create a RAM user and authorize the RAM user to access Simple Log Service](https://www.alibabacloud.com/help/sls/create-a-ram-user-and-authorize-the-ram-user-to-access-log-service).\n2. Obtain the access key pair for the RAM user. For details see [get Access Key pair](https://www.alibabacloud.com/help/ram/user-guide/create-an-accesskey-pair).\n\nNote the access key pair details for the next step.""}, {""title"": ""Add ActionTrail Logstore"", ""description"": ""To enable the Alibaba Cloud ActionTrail connector for Microsoft Sentinel, click upon add ActionTrail Logstore, fill the form with the Alibaba Cloud environment configuration and click Connect."", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""AliCloud SLS Logstore Endpoint URL"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Logstore"", ""title"": ""Add ActionTrail Logstore"", ""subtitle"": ""Add SLS Logstore linked to Alibaba Cloud ActionTrail"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Alibaba Cloud SLS Public Endpoint"", ""placeholder"": "".log.aliyuncs.com"", ""type"": ""string"", ""name"": ""endpoint""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Project"", ""placeholder"": """", ""type"": ""string"", ""name"": 
""project""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Logstore"", ""placeholder"": """", ""type"": ""string"", ""name"": ""logstore""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Access Key ID"", ""placeholder"": ""Access Key ID"", ""type"": ""password"", ""name"": ""accessKeyId""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Access Key Secret"", ""placeholder"": ""Access Key Secret"", ""type"": ""password"", ""name"": ""accessKeySecret""}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""SLS REST API Credentials/permissions"", ""description"": ""**AliCloudAccessKeyId** and **AliCloudAccessKeySecret** are required for making API calls. RAM policy statement with action of atleast `log:GetLogStoreLogs` over resource `acs:log:{#regionId}:{#accountId}:project/{#ProjectName}/logstore/{#LogstoreName}` is needed to grant a RAM user the permissions to call this operation.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Alibaba%20Cloud%20ActionTrail/Data%20Connectors/AliCloudCloudTrailConnector_CCP/AliCloudActionTrail_DataConnectorDefinition.json","true" +"AlsidForADLog_CL","Alsid For AD","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Alsid%20For%20AD","alsid1603447574634","Alsid_For_AD_MSS","2022-05-06","","","Alsid","Partner","https://www.alsid.com/contact-us/","","domains","AlsidForAD","Alsid","Alsid for Active Directory","Alsid for Active Directory connector allows to export Alsid Indicators of Exposures, trailflow and Indicators of Attacks logs to Azure Sentinel in real time.
It provides a data parser to manipulate the logs more easily. The different workbooks ease your Active Directory monitoring and provide different ways to visualize the data. The analytic templates allow to automate responses regarding different events, exposures, or attacks.","[{""title"": """", ""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-alsidforad-parser) to create the Kusto Functions alias, **afad_parser**"", ""instructions"": []}, {""title"": ""1. Configure the Syslog server"", ""description"": ""You will first need a **linux Syslog** server that Alsid for AD will send logs to. Typically you can run **rsyslog** on **Ubuntu**.\n You can then configure this server as you wish, but it is recommended to be able to output AFAD logs in a separate file.\nAlternatively you can use [this Quickstart template](https://azure.microsoft.com/resources/templates/alsid-syslog-proxy/) which will deploy the Syslog server and the Microsoft agent for you. If you do use this template, you can skip step 3.""}, {""title"": ""2. Configure Alsid to send logs to your Syslog server"", ""description"": ""On your **Alsid for AD** portal, go to *System*, *Configuration* and then *Syslog*.\nFrom there you can create a new Syslog alert toward your Syslog server.\n\nOnce this is done, check that the logs are correctly gathered on your server in a seperate file (to do this, you can use the *Test the configuration* button in the Syslog alert configuration in AFAD).\nIf you used the Quickstart template, the Syslog server will by default listen on port 514 in UDP and 1514 in TCP, without TLS.""}, {""title"": ""3. 
Install and onboard the Microsoft agent for Linux"", ""description"": ""You can skip this step if you used the Quickstart template in step 1"", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""4. Configure the logs to be collected by the agents"", ""description"": ""Configure the agent to collect the logs.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Custom Logs**.\n2. Select **Apply below configuration to my machines** and click **Add**.\n3. Upload a sample AFAD Syslog file from the **Linux** machine running the **Syslog** server and click **Next**, for your convenience, you can find such a file [here](https://github.com/Azure/azure-quickstart-templates/blob/master/alsid-syslog-proxy/logs/AlsidForAD.log).\n4. Set the record delimiter to **New Line** if not already the case and click **Next**.\n5. Select **Linux** and enter the file path to the **Syslog** file, click **+** then **Next**. If you used the Quickstart template in step 1, the default location of the file is `/var/log/AlsidForAD.log`.\n6. 
Set the **Name** to *AlsidForADLog_CL* then click **Done** (Azure automatically adds *_CL* at the end of the name, there must be only one, make sure the name is not *AlsidForADLog_CL_CL*).\n\nAll of these steps are showcased [here](https://www.youtube.com/watch?v=JwV1uZSyXM4&feature=youtu.be) as an example"", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": """", ""description"": ""> You should now be able to receive logs in the *AlsidForADLog_CL* table, logs data can be parse using the **afad_parser()** function, used by all query samples, workbooks and analytic templates.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Alsid%20For%20AD/Data%20Connectors/AlsidForAD.json","true" +"AWSCloudTrail","Amazon Web Services","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services","azuresentinel","azure-sentinel-solution-amazonwebservices","2022-05-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AWS","Amazon","Amazon Web Services","Follow these instructions to connect to AWS and stream your CloudTrail logs into Microsoft Sentinel. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2218883&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect AWS cloud trail with Microsoft Sentinel\u200b"", ""description"": ""The connection necessitates giving Microsoft permissions to access your AWS account. To enable this, follow the instructions under [Connect AWS to Microsoft Sentinel](https://aka.ms/AWSConnector) and use these parameters when prompted:\n\n> Data from all regions will be sent to and stored in the workspace's region.\n\n> It takes about 5 minutes until the connection streams data to your workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""MicrosoftAwsAccount""], ""label"": ""Microsoft account ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""External ID (Workspace ID)""}, ""type"": ""CopyableLabel""}, {""parameters"": {""text"": ""The integration is applicable for AWS public cloud accounts."", ""visible"": false, ""inline"": true}, ""type"": ""InfoMessage""}, {""parameters"": {}, ""type"": ""AwsCloudTrail""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services/Data%20Connectors/template_AWS.json","true" +"AWSCloudTrail","Amazon Web Services","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services","azuresentinel","azure-sentinel-solution-amazonwebservices","2022-05-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsS3","Amazon","Amazon Web Services S3","This connector allows you to ingest AWS service logs, collected in AWS S3 buckets, to Microsoft Sentinel. 
The currently supported data types are:
* AWS CloudTrail
* VPC Flow Logs
* AWS GuardDuty
* AWSCloudWatch

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2218883&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""1. Set up your AWS environment"", ""description"": ""There are two options for setting up your AWS environment to send logs from an S3 bucket to your Log Analytics Workspace:"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Setup with PowerShell script (recommended)"", ""instructions"": [{""parameters"": {""govScript"": ""Download and extract the files from the following link: [AWS S3 Setup Script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/AWS-S3/ConfigAwsS3DataConnectorScriptsGov.zip).\n\n> 1. Make sure that you have PowerShell on your machine: [Installation instructions for PowerShell](https://docs.microsoft.com/powershell/scripting/install/installing-powershell?view=powershell-7.2).\n\n> 2. Make sure that you have the AWS CLI on your machine: [Installation instructions for the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2.html).\n\nBefore running the script, run the aws configure command from your PowerShell command line, and enter the relevant information as prompted. See [AWS Command Line Interface | Configuration basics](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html) for details. Note: When Aws configure is run, Default output format should not be set to None. It must be set to some value, such as json."", ""prodScript"": ""Download and extract the files from the following link: [AWS S3 Setup Script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/AWS-S3/ConfigAwsS3DataConnectorScripts.zip).\n\n> 1. Make sure that you have PowerShell on your machine: [Installation instructions for PowerShell](https://docs.microsoft.com/powershell/scripting/install/installing-powershell?view=powershell-7.2).\n\n> 2. 
Make sure that you have the AWS CLI on your machine: [Installation instructions for the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2.html).\n\nBefore running the script, run the aws configure command from your PowerShell command line, and enter the relevant information as prompted. See [AWS Command Line Interface | Configuration basics](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html) for details. Note: When Aws configure is run, Default output format should not be set to None. It must be set to some value, such as json.""}, ""type"": ""MarkdownControlEnvBased""}, {""parameters"": {""label"": ""Run script to set up the environment"", ""value"": ""./ConfigAwsConnector.ps1""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""External ID (Workspace ID)""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Manual Setup"", ""description"": ""Follow the instruction in the following link to set up the environment: [Connect AWS S3 to Microsoft Sentinel](https://aka.ms/AWSS3Connector)""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Add connection"", ""instructions"": [{""parameters"": {}, ""type"": ""AwsS3""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Environment"", ""description"": ""you must have the following AWS resources defined and configured: S3, Simple Queue Service (SQS), IAM roles and permissions policies, and the AWS services whose logs you want to collect.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services/Data%20Connectors/template_AwsS3.json","true" +"AWSCloudWatch","Amazon Web Services","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services","azuresentinel","azure-sentinel-solution-amazonwebservices","2022-05-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsS3","Amazon","Amazon Web Services S3","This connector allows you to ingest AWS service logs, collected in AWS S3 buckets, to Microsoft Sentinel. The currently supported data types are:
* AWS CloudTrail
* VPC Flow Logs
* AWS GuardDuty
* AWSCloudWatch

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2218883&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""1. Set up your AWS environment"", ""description"": ""There are two options for setting up your AWS environment to send logs from an S3 bucket to your Log Analytics Workspace:"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Setup with PowerShell script (recommended)"", ""instructions"": [{""parameters"": {""govScript"": ""Download and extract the files from the following link: [AWS S3 Setup Script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/AWS-S3/ConfigAwsS3DataConnectorScriptsGov.zip).\n\n> 1. Make sure that you have PowerShell on your machine: [Installation instructions for PowerShell](https://docs.microsoft.com/powershell/scripting/install/installing-powershell?view=powershell-7.2).\n\n> 2. Make sure that you have the AWS CLI on your machine: [Installation instructions for the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2.html).\n\nBefore running the script, run the aws configure command from your PowerShell command line, and enter the relevant information as prompted. See [AWS Command Line Interface | Configuration basics](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html) for details. Note: When Aws configure is run, Default output format should not be set to None. It must be set to some value, such as json."", ""prodScript"": ""Download and extract the files from the following link: [AWS S3 Setup Script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/AWS-S3/ConfigAwsS3DataConnectorScripts.zip).\n\n> 1. Make sure that you have PowerShell on your machine: [Installation instructions for PowerShell](https://docs.microsoft.com/powershell/scripting/install/installing-powershell?view=powershell-7.2).\n\n> 2. 
Make sure that you have the AWS CLI on your machine: [Installation instructions for the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2.html).\n\nBefore running the script, run the aws configure command from your PowerShell command line, and enter the relevant information as prompted. See [AWS Command Line Interface | Configuration basics](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html) for details. Note: When Aws configure is run, Default output format should not be set to None. It must be set to some value, such as json.""}, ""type"": ""MarkdownControlEnvBased""}, {""parameters"": {""label"": ""Run script to set up the environment"", ""value"": ""./ConfigAwsConnector.ps1""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""External ID (Workspace ID)""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Manual Setup"", ""description"": ""Follow the instruction in the following link to set up the environment: [Connect AWS S3 to Microsoft Sentinel](https://aka.ms/AWSS3Connector)""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Add connection"", ""instructions"": [{""parameters"": {}, ""type"": ""AwsS3""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Environment"", ""description"": ""you must have the following AWS resources defined and configured: S3, Simple Queue Service (SQS), IAM roles and permissions policies, and the AWS services whose logs you want to collect.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services/Data%20Connectors/template_AwsS3.json","true" +"AWSGuardDuty","Amazon Web Services","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services","azuresentinel","azure-sentinel-solution-amazonwebservices","2022-05-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsS3","Amazon","Amazon Web Services S3","This connector allows you to ingest AWS service logs, collected in AWS S3 buckets, to Microsoft Sentinel. The currently supported data types are:
* AWS CloudTrail
* VPC Flow Logs
* AWS GuardDuty
* AWSCloudWatch

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2218883&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""1. Set up your AWS environment"", ""description"": ""There are two options for setting up your AWS environment to send logs from an S3 bucket to your Log Analytics Workspace:"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Setup with PowerShell script (recommended)"", ""instructions"": [{""parameters"": {""govScript"": ""Download and extract the files from the following link: [AWS S3 Setup Script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/AWS-S3/ConfigAwsS3DataConnectorScriptsGov.zip).\n\n> 1. Make sure that you have PowerShell on your machine: [Installation instructions for PowerShell](https://docs.microsoft.com/powershell/scripting/install/installing-powershell?view=powershell-7.2).\n\n> 2. Make sure that you have the AWS CLI on your machine: [Installation instructions for the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2.html).\n\nBefore running the script, run the aws configure command from your PowerShell command line, and enter the relevant information as prompted. See [AWS Command Line Interface | Configuration basics](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html) for details. Note: When Aws configure is run, Default output format should not be set to None. It must be set to some value, such as json."", ""prodScript"": ""Download and extract the files from the following link: [AWS S3 Setup Script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/AWS-S3/ConfigAwsS3DataConnectorScripts.zip).\n\n> 1. Make sure that you have PowerShell on your machine: [Installation instructions for PowerShell](https://docs.microsoft.com/powershell/scripting/install/installing-powershell?view=powershell-7.2).\n\n> 2. 
Make sure that you have the AWS CLI on your machine: [Installation instructions for the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2.html).\n\nBefore running the script, run the aws configure command from your PowerShell command line, and enter the relevant information as prompted. See [AWS Command Line Interface | Configuration basics](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html) for details. Note: When Aws configure is run, Default output format should not be set to None. It must be set to some value, such as json.""}, ""type"": ""MarkdownControlEnvBased""}, {""parameters"": {""label"": ""Run script to set up the environment"", ""value"": ""./ConfigAwsConnector.ps1""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""External ID (Workspace ID)""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Manual Setup"", ""description"": ""Follow the instruction in the following link to set up the environment: [Connect AWS S3 to Microsoft Sentinel](https://aka.ms/AWSS3Connector)""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Add connection"", ""instructions"": [{""parameters"": {}, ""type"": ""AwsS3""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Environment"", ""description"": ""you must have the following AWS resources defined and configured: S3, Simple Queue Service (SQS), IAM roles and permissions policies, and the AWS services whose logs you want to collect.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services/Data%20Connectors/template_AwsS3.json","true" +"AWSVPCFlow","Amazon Web Services","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services","azuresentinel","azure-sentinel-solution-amazonwebservices","2022-05-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsS3","Amazon","Amazon Web Services S3","This connector allows you to ingest AWS service logs, collected in AWS S3 buckets, to Microsoft Sentinel. The currently supported data types are:
* AWS CloudTrail
* VPC Flow Logs
* AWS GuardDuty
* AWSCloudWatch

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2218883&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""1. Set up your AWS environment"", ""description"": ""There are two options for setting up your AWS environment to send logs from an S3 bucket to your Log Analytics Workspace:"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Setup with PowerShell script (recommended)"", ""instructions"": [{""parameters"": {""govScript"": ""Download and extract the files from the following link: [AWS S3 Setup Script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/AWS-S3/ConfigAwsS3DataConnectorScriptsGov.zip).\n\n> 1. Make sure that you have PowerShell on your machine: [Installation instructions for PowerShell](https://docs.microsoft.com/powershell/scripting/install/installing-powershell?view=powershell-7.2).\n\n> 2. Make sure that you have the AWS CLI on your machine: [Installation instructions for the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2.html).\n\nBefore running the script, run the aws configure command from your PowerShell command line, and enter the relevant information as prompted. See [AWS Command Line Interface | Configuration basics](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html) for details. Note: When Aws configure is run, Default output format should not be set to None. It must be set to some value, such as json."", ""prodScript"": ""Download and extract the files from the following link: [AWS S3 Setup Script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/AWS-S3/ConfigAwsS3DataConnectorScripts.zip).\n\n> 1. Make sure that you have PowerShell on your machine: [Installation instructions for PowerShell](https://docs.microsoft.com/powershell/scripting/install/installing-powershell?view=powershell-7.2).\n\n> 2. 
Make sure that you have the AWS CLI on your machine: [Installation instructions for the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2.html).\n\nBefore running the script, run the aws configure command from your PowerShell command line, and enter the relevant information as prompted. See [AWS Command Line Interface | Configuration basics](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html) for details. Note: When Aws configure is run, Default output format should not be set to None. It must be set to some value, such as json.""}, ""type"": ""MarkdownControlEnvBased""}, {""parameters"": {""label"": ""Run script to set up the environment"", ""value"": ""./ConfigAwsConnector.ps1""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""External ID (Workspace ID)""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Manual Setup"", ""description"": ""Follow the instruction in the following link to set up the environment: [Connect AWS S3 to Microsoft Sentinel](https://aka.ms/AWSS3Connector)""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Add connection"", ""instructions"": [{""parameters"": {}, ""type"": ""AwsS3""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Environment"", ""description"": ""you must have the following AWS resources defined and configured: S3, Simple Queue Service (SQS), IAM roles and permissions policies, and the AWS services whose logs you want to collect.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services/Data%20Connectors/template_AwsS3.json","true" +"AWSWAF","Amazon Web Services","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services","azuresentinel","azure-sentinel-solution-amazonwebservices","2022-05-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsS3WafCcpDefinition","Microsoft","Amazon Web Services S3 WAF","This connector allows you to ingest AWS WAF logs, collected in AWS S3 buckets, to Microsoft Sentinel. AWS WAF logs are detailed records of traffic that web access control lists (ACLs) analyze, which are essential for maintaining the security and performance of web applications. These logs contain information such as the time AWS WAF received the request, the specifics of the request, and the action taken by the rule that the request matched.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. AWS CloudFormation Deployment \n To configure access on AWS, two templates have been generated to set up the AWS environment to send logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. 
Choose the \u2018Specify template\u2019 option, then \u2018Upload a template file\u2019 by clicking on \u2018Choose file\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018Choose file\u2019 and select the downloaded template. \n 3. Click 'Next' and 'Create stack'.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS WAF resources deployment"", ""isMultiLine"": true, ""fillWith"": [""AwsWAF""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, 
""write"": true, ""delete"": true, ""action"": false}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": false, ""write"": false, ""delete"": false, ""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services/Data%20Connectors/AWS_WAF_CCP/AwsS3_WAF_DataConnectorDefinition.json","true" +"AWSNetworkFirewallAlert","Amazon Web Services NetworkFirewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20NetworkFirewall","azuresentinel","azure-sentinel-solution-aws-networkfirewall","2025-03-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsNetworkFirewallCcpDefinition","Microsoft","Amazon Web Services NetworkFirewall (via Codeless Connector Framework)","This data connector allows you to ingest AWS Network Firewall logs into Microsoft Sentinel for advanced threat detection and security monitoring. 
By leveraging Amazon S3 and Amazon SQS, the connector forwards network traffic logs, intrusion detection alerts, and firewall events to Microsoft Sentinel, enabling real-time analysis and correlation with other security data","[{""title"": ""Ingesting AWS NetworkFirewall logs in Microsoft Sentinel"", ""description"": ""### List of Resources Required:\n\n* Open ID Connect (OIDC) web identity provider\n* IAM Role\n* Amazon S3 Bucket\n* Amazon SQS\n* AWSNetworkFirewall configuration\n* Follow these instructions for [AWS NetworkFirewall Data connector](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20NetworkFirewall/Data%20Connectors/readme.md) configuration \n\n"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. AWS CloudFormation Deployment \n To configure access on AWS, two templates have been generated to set up the AWS environment to send logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018**Specify template**\u2019 option, then \u2018**Upload a template file**\u2019 by clicking on \u2018**Choose file**\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018**Choose file**\u2019 and select the downloaded template. \n 3. Click '**Next**' and '**Create stack**'.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWSNetworkFirewall resources deployment"", ""isMultiLine"": true, ""fillWith"": [""AWSNetworkFirewall""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-AWSNetworkFirewall-AlertLog"", ""text"": ""Alert Log""}, {""key"": ""Custom-AWSNetworkFirewall-FlowLog"", ""text"": ""Flow Log""}, {""key"": ""Custom-AWSNetworkFirewall-TlsLog"", ""text"": ""Tls Log""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": 
false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20NetworkFirewall/Data%20Connectors/AWSNetworkFirewallLogs_CCP/AWSNetworkFirewallLog_ConnectorDefinition.json","true" +"AWSNetworkFirewallFlow","Amazon Web Services NetworkFirewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20NetworkFirewall","azuresentinel","azure-sentinel-solution-aws-networkfirewall","2025-03-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsNetworkFirewallCcpDefinition","Microsoft","Amazon Web Services NetworkFirewall (via Codeless Connector Framework)","This data connector allows you to ingest AWS Network Firewall logs into Microsoft Sentinel for advanced threat detection and security monitoring. By leveraging Amazon S3 and Amazon SQS, the connector forwards network traffic logs, intrusion detection alerts, and firewall events to Microsoft Sentinel, enabling real-time analysis and correlation with other security data","[{""title"": ""Ingesting AWS NetworkFirewall logs in Microsoft Sentinel"", ""description"": ""### List of Resources Required:\n\n* Open ID Connect (OIDC) web identity provider\n* IAM Role\n* Amazon S3 Bucket\n* Amazon SQS\n* AWSNetworkFirewall configuration\n* Follow these instructions for [AWS NetworkFirewall Data connector](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20NetworkFirewall/Data%20Connectors/readme.md) configuration \n\n"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. AWS CloudFormation Deployment \n To configure access on AWS, two templates have been generated to set up the AWS environment to send logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. 
Choose the \u2018**Specify template**\u2019 option, then \u2018**Upload a template file**\u2019 by clicking on \u2018**Choose file**\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018**Choose file**\u2019 and select the downloaded template. \n 3. Click '**Next**' and '**Create stack**'.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWSNetworkFirewall resources deployment"", ""isMultiLine"": true, ""fillWith"": [""AWSNetworkFirewall""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, 
""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-AWSNetworkFirewall-AlertLog"", ""text"": ""Alert Log""}, {""key"": ""Custom-AWSNetworkFirewall-FlowLog"", ""text"": ""Flow Log""}, {""key"": ""Custom-AWSNetworkFirewall-TlsLog"", ""text"": ""Tls Log""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20NetworkFirewall/Data%20Connectors/AWSNetworkFirewallLogs_CCP/AWSNetworkFirewallLog_ConnectorDefinition.json","true" +"AWSNetworkFirewallTls","Amazon Web Services NetworkFirewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20NetworkFirewall","azuresentinel","azure-sentinel-solution-aws-networkfirewall","2025-03-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AwsNetworkFirewallCcpDefinition","Microsoft","Amazon Web Services NetworkFirewall (via Codeless Connector Framework)","This data connector allows you to ingest AWS Network Firewall logs into Microsoft Sentinel for advanced threat detection and security monitoring. 
By leveraging Amazon S3 and Amazon SQS, the connector forwards network traffic logs, intrusion detection alerts, and firewall events to Microsoft Sentinel, enabling real-time analysis and correlation with other security data","[{""title"": ""Ingesting AWS NetworkFirewall logs in Microsoft Sentinel"", ""description"": ""### List of Resources Required:\n\n* Open ID Connect (OIDC) web identity provider\n* IAM Role\n* Amazon S3 Bucket\n* Amazon SQS\n* AWSNetworkFirewall configuration\n* Follow these instructions for [AWS NetworkFirewall Data connector](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20NetworkFirewall/Data%20Connectors/readme.md) configuration \n\n"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. AWS CloudFormation Deployment \n To configure access on AWS, two templates have been generated to set up the AWS environment to send logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018**Specify template**\u2019 option, then \u2018**Upload a template file**\u2019 by clicking on \u2018**Choose file**\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018**Choose file**\u2019 and select the downloaded template. \n 3. Click '**Next**' and '**Create stack**'.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWSNetworkFirewall resources deployment"", ""isMultiLine"": true, ""fillWith"": [""AWSNetworkFirewall""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-AWSNetworkFirewall-AlertLog"", ""text"": ""Alert Log""}, {""key"": ""Custom-AWSNetworkFirewall-FlowLog"", ""text"": ""Flow Log""}, {""key"": ""Custom-AWSNetworkFirewall-TlsLog"", ""text"": ""Tls Log""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": 
false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20NetworkFirewall/Data%20Connectors/AWSNetworkFirewallLogs_CCP/AWSNetworkFirewallLog_ConnectorDefinition.json","true" +"AWSRoute53Resolver","Amazon Web Services Route 53","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20Route%2053","azuresentinel","azure-sentinel-solution-amazonwebservicesroute53","2025-03-21","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AWSRoute53ResolverCCPDefinition","Microsoft","Amazon Web Services S3 DNS Route53 (via Codeless Connector Framework)","This connector enables ingestion of AWS Route 53 DNS logs into Microsoft Sentinel for enhanced visibility and threat detection. It supports DNS Resolver query logs ingested directly from AWS S3 buckets, while Public DNS query logs and Route 53 audit logs can be ingested using Microsoft Sentinel's AWS CloudWatch and CloudTrail connectors. Comprehensive instructions are provided to guide you through the setup of each log type. Leverage this connector to monitor DNS activity, detect potential threats, and improve your security posture in cloud environments.","[{""title"": ""AWS Route53"", ""description"": ""This connector enables the ingestion of AWS Route 53 DNS logs into Microsoft Sentinel, providing enhanced visibility into DNS activity and strengthening threat detection capabilities. It supports direct ingestion of DNS Resolver query logs from AWS S3 buckets, while Public DNS query logs and Route 53 audit logs can be ingested via Microsoft Sentinel\u2019s AWS CloudWatch and CloudTrail connectors. Detailed setup instructions are provided for each log type. Use this connector to monitor DNS traffic, identify potential threats, and enhance your cloud security posture.\n\nYou can ingest the following type of logs from AWS Route 53 to Microsoft Sentinel:\n1. Route 53 Resolver query logs\n2. 
Route 53 Public Hosted zones query logs (via Microsoft Sentinel CloudWatch connector)\n3. Route 53 audit logs (via Microsoft Sentinel CloudTrail connector)""}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Ingesting Route53 Resolver query logs in Microsoft Sentinel"", ""description"": ""### List of Resources Required:\n\n* Open ID Connect (OIDC) web identity provider\n* IAM Role\n* Amazon S3 Bucket\n* Amazon SQS\n* Route 53 Resolver query logging configuration\n* VPC to associate with Route53 Resolver query log config\n\n"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. AWS CloudFormation Deployment \n To configure access on AWS, two templates have been generated to set up the AWS environment to send logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018**Specify template**\u2019 option, then \u2018**Upload a template file**\u2019 by clicking on \u2018**Choose file**\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018**Choose file**\u2019 and select the downloaded template. \n 3. Click '**Next**' and '**Create stack**'.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS Route53 resources deployment"", ""isMultiLine"": true, ""fillWith"": [""AWSRoute53Resolver""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""### 2. 
Connect new collectors \n To enable Amazon Web Services S3 DNS Route53 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS Security Hub connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""securestring"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""securestring"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}]}]}}]}, {""title"": ""Ingesting Route 53 Public Hosted zones query logs (via Microsoft Sentinel CloudWatch connector)"", ""description"": ""Public Hosted zone query logs are exported to CloudWatch service in AWS. We can use 'Amazon Web Services S3' connector to ingest CloudWatch logs from AWS to Microsoft Sentinel."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1: Configure logging for Public DNS queries"", ""description"": ""1. Sign in to the AWS Management Console and open the Route 53 console at [AWS Route 53](https://console.aws.amazon.com/route53/).\n2. Navigate to Route 53 > Hosted zones.\n3. Choose the Public hosted zone that you want to configure query logging for.\n4. In the Hosted zone details pane, click \""Configure query logging\"".\n5. Choose an existing log group or create a new log group.\n6. 
Choose Create.""}, {""title"": ""Step 2: Configure Amazon Web Services S3 data connector for AWS CloudWatch"", ""description"": ""AWS CloudWatch logs can be exported to an S3 bucket using lambda function. To ingest Public DNS queries from `AWS CloudWatch` to `S3` bucket and then to Microsoft Sentinel, follow the instructions provided in the [Amazon Web Services S3 connector](https://learn.microsoft.com/en-us/azure/sentinel/connect-aws?tabs=s3).""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Ingesting Route 53 audit logs (via Microsoft Sentinel CloudTrail connector)"", ""description"": ""Route 53 audit logs i.e. the logs related to actions taken by user, role or AWS service in Route 53 can be exported to an S3 bucket via AWS CloudTrail service. We can use 'Amazon Web Services S3' connector to ingest CloudTrail logs from AWS to Microsoft Sentinel."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1: Configure logging for AWS Route 53 Audit logs"", ""description"": ""1. Sign in to the AWS Management Console and open the CloudTrail console at [AWS CloudTrail](https://console.aws.amazon.com/cloudtrail)\n2. If you do not have an existing trail, click on 'Create trail'\n3. Enter a name for your trail in the Trail name field.\n4. Select Create new S3 bucket (you may also choose to use an existing S3 bucket).\n5. Leave the other settings as default, and click Next.\n6. Select Event type, make sure Management events is selected.\n7. Select API activity, 'Read' and 'Write'\n8. Click Next.\n9. 
Review the settings and click 'Create trail'.""}, {""title"": ""Step 2: Configure Amazon Web Services S3 data connector for AWS CloudTrail"", ""description"": ""To ingest audit and management logs from `AWS CloudTrail` to Microsoft Sentinel, follow the instructions provided in the [Amazon Web Services S3 connector](https://learn.microsoft.com/en-us/azure/sentinel/connect-aws?tabs=s3)""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": false, ""write"": false, ""delete"": false, ""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Amazon%20Web%20Services%20Route%2053/Data%20Connectors/AWSRoute53Resolver_CCP/AWSRoute53Resolver_DataConnectorDefinition.json","true" +"Anvilogic_Alerts_CL","Anvilogic","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Anvilogic","anvilogic1725900018831","azure-sentinel-solution-anvilogic","2025-06-20","","","Anvilogic","Partner","https://www.anvilogic.com/","","domains","AnvilogicCCFDefinition","Anvilogic","Anvilogic","The Anvilogic data connector allows you to pull events of interest generated in the Anvilogic ADX cluster into your Microsoft Sentinel","[{""description"": ""Complete the form to ingest Anvilogic Alerts into 
your Microsoft Sentinel"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Token Endpoint"", ""placeholder"": ""https://login[.]microsoftonline[.]com//oauth2/v2.0/token"", ""type"": ""text"", ""name"": ""tokenEndpoint""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Anvilogic ADX Scope"", ""placeholder"": ""/.default"", ""type"": ""text"", ""name"": ""authorizationEndpoint""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Anvilogic ADX Request URI"", ""placeholder"": ""/v2/rest/query"", ""type"": ""text"", ""name"": ""apiEndpoint""}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}], ""title"": ""Connect to Anvilogic to start collecting events of interest in Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Anvilogic Application Registration Client ID and Client Secret"", ""description"": ""To access the Anvilogic ADX we require the client id and client secret from the Anvilogic app registration""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Anvilogic/Data%20Connectors/AnviLogic_CCF/Anvilogic_DataConnectorDefinition.json","true" +"","Apache Log4j Vulnerability Detection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Apache%20Log4j%20Vulnerability%20Detection","azuresentinel","azure-sentinel-solution-apachelog4jvulnerability","2021-12-15","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" 
+"ApacheHTTPServer_CL","ApacheHTTPServer","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ApacheHTTPServer","azuresentinel","azure-sentinel-solution-apachehttpserver","2021-10-27","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ApacheHTTPServer","Apache","[Deprecated] Apache HTTP Server","The Apache HTTP Server data connector provides the capability to ingest [Apache HTTP Server](http://httpd.apache.org/) events into Microsoft Sentinel. Refer to [Apache Logs documentation](https://httpd.apache.org/docs/2.4/logs.html) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias ApacheHTTPServer and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ApacheHTTPServer/Parsers/ApacheHTTPServer.txt). The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Apache HTTP Server where the logs are generated.\n\n> Logs from Apache HTTP Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the custom log directory to be collected"", ""instructions"": [{""parameters"": {""linkType"": ""OpenCustomLogsSettings""}, ""type"": ""InstallAgent""}]}, {""title"": """", ""description"": ""1. 
Select the link above to open your workspace advanced settings \n2. From the left pane, select **Data**, select **Custom Logs** and click **Add+**\n3. Click **Browse** to upload a sample of a Apache HTTP Server log file (e.g. access.log or error.log). Then, click **Next >**\n4. Select **New line** as the record delimiter and click **Next >**\n5. Select **Windows** or **Linux** and enter the path to Apache HTTP logs based on your configuration. Example: \n - **Windows** directory: `C:\\Server\\bin\\Apache24\\logs\\*.log`\n - **Linux** Directory: `/var/log/httpd/*.log` \n6. After entering the path, click the '+' symbol to apply, then click **Next >** \n7. Add **ApacheHTTPServer_CL** as the custom log Name and click **Done**""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ApacheHTTPServer/Data%20Connectors/Connector_ApacheHTTPServer_agent.json","true" +"CommonSecurityLog","AristaAwakeSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AristaAwakeSecurity","arista-networks","awake-security","2021-10-18","","","Arista - Awake Security","Partner","https://awakesecurity.com/","","domains","AristaAwakeSecurity","Arista Networks","[Deprecated] Awake Security via Legacy Agent","The Awake Security CEF connector allows users to send detection model matches from the Awake Security Platform to Microsoft Sentinel. Remediate threats quickly with the power of network detection and response and speed up investigations with deep visibility especially into unmanaged entities including users, devices and applications on your network. The connector also enables the creation of network security-focused custom alerts, incidents, workbooks and notebooks that align with your existing security operations workflows. ","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. 
Forward Awake Adversarial Model match results to a CEF collector."", ""description"": ""Perform the following steps to forward Awake Adversarial Model match results to a CEF collector listening on TCP port **514** at IP **192.168.0.1**:\n- Navigate to the Detection Management Skills page in the Awake UI.\n- Click + Add New Skill.\n- Set the Expression field to,\n>integrations.cef.tcp { destination: \""192.168.0.1\"", port: 514, secure: false, severity: Warning }\n- Set the Title field to a descriptive name like,\n>Forward Awake Adversarial Model match result to Microsoft Sentinel.\n- Set the Reference Identifier to something easily discoverable like,\n>integrations.cef.sentinel-forwarder\n- Click Save.\n\nNote: Within a few minutes of saving the definition and other fields the system will begin sending new model match results to the CEF events collector as they are detected.\n\nFor more information, refer to the **Adding a Security Information and Event Management Push Integration** page from the Help Documentation in the Awake UI.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AristaAwakeSecurity/Data%20Connectors/Connector_AristaAwakeSecurity_CEF.json","true" +"Armis_Activities_CL","Armis","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis","armisinc1668090987837","armis-solution","2022-08-02","2024-08-23","","Armis Corporation","Partner","https://support.armis.com/","","domains","ArmisActivities","Armis","Armis Activities","The [Armis](https://www.armis.com/) Activities connector gives the capability to ingest Armis device Activities into Microsoft Sentinel through the Armis REST API. Refer to the API documentation: `https://.armis.com/api/v1/doc` for more information. The connector provides the ability to get device activity information from the Armis platform. Armis uses your existing infrastructure to discover and identify devices without having to deploy any agents. Armis detects what all devices are doing in your environment and classifies those activities to get a complete picture of device behavior. 
These activities are analyzed for an understanding of normal and abnormal device behavior and used to assess device and network risk.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Armis API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias ArmisActivities and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis/Parsers/ArmisActivities.yaml). The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Armis API**\n\n Follow these instructions to create an Armis API secret key.\n 1. Log into your Armis instance\n 2. Navigate to Settings -> API Management\n 3. If the secret key has not already been created, press the Create button to create the secret key\n 4. To access the secret key, press the Show button\n 5. 
The secret key can now be copied and used during the Armis Activities connector configuration""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Armis Activities data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Armis API Authorization Key(s)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Armis connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ArmisActivitiesAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-ArmisActivitiesAPI-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tArmis Secret Key \n\t\tArmis URL (https://.armis.com/api/v1/) \n\t\tArmis Activity Table Name \n\t\tArmis Schedule \n\t\tAvoid Duplicates (Default: false) \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Armis Activity data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. 
Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-ArmisActivitiesAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ARMISXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. 
In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tArmis Secret Key \n\t\tArmis URL (https://.armis.com/api/v1/) \n\t\tArmis Activity Table Name \n\t\tArmis Schedule \n\t\tAvoid Duplicates (Default: false) \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Armis Secret Key** is required. 
See the documentation to learn more about API on the `https://.armis.com/api/v1/doc`""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis/Data%20Connectors/ArmisActivities/ArmisActivities_API_FunctionApp.json","true" +"Armis_Alerts_CL","Armis","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis","armisinc1668090987837","armis-solution","2022-08-02","2024-08-23","","Armis Corporation","Partner","https://support.armis.com/","","domains","ArmisAlerts","Armis","Armis Alerts","The [Armis](https://www.armis.com/) Alerts connector gives the capability to ingest Armis Alerts into Microsoft Sentinel through the Armis REST API. Refer to the API documentation: `https://.armis.com/api/v1/docs` for more information. The connector provides the ability to get alert information from the Armis platform and to identify and prioritize threats in your environment. Armis uses your existing infrastructure to discover and identify devices without having to deploy any agents. ","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Armis API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. 
[Follow these steps](https://aka.ms/sentinel-ArmisAlertsAPI-parser) to create the Kusto functions alias, **ArmisAlerts**""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Armis API**\n\n Follow these instructions to create an Armis API secret key.\n 1. Log into your Armis instance\n 2. Navigate to Settings -> API Management\n 3. If the secret key has not already been created, press the Create button to create the secret key\n 4. To access the secret key, press the Show button\n 5. The secret key can now be copied and used during the Armis Alerts connector configuration""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Armis Alert data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Armis API Authorization Key(s)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Armis connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ArmisAlertsAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-ArmisAlertsAPI-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tArmis Secret Key \n\t\tArmis URL (https://.armis.com/api/v1/) \n\t\tArmis Alert Table Name \n\t\tArmis Schedule \n\t\tAvoid Duplicates (Default: true) \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Armis Alert data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-ArmisAlertsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ARMISXXXXX).\n\n\te. 
**Select a runtime:** Choose Python 3.11\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tArmis Secret Key \n\t\tArmis URL (https://.armis.com/api/v1/) \n\t\tArmis Alert Table Name \n\t\tArmis Schedule \n\t\tAvoid Duplicates (Default: true) \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Armis Secret Key** is required. See the documentation to learn more about API on the `https://.armis.com/api/v1/doc`""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis/Data%20Connectors/ArmisAlerts/ArmisAlerts_API_FunctionApp.json","true" +"Armis_Activities_CL","Armis","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis","armisinc1668090987837","armis-solution","2022-08-02","2024-08-23","","Armis Corporation","Partner","https://support.armis.com/","","domains","ArmisAlertsActivities","Armis","Armis Alerts Activities","The [Armis](https://www.armis.com/) Alerts Activities connector gives the capability to ingest Armis Alerts and Activities into Microsoft Sentinel through the Armis REST API. Refer to the API documentation: `https://.armis.com/api/v1/docs` for more information. The connector provides the ability to get alert and activity information from the Armis platform and to identify and prioritize threats in your environment. Armis uses your existing infrastructure to discover and identify devices without having to deploy any agents. ","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Armis API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias ArmisActivities/ArmisAlerts and load the function code. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Armis API**\n\n Follow these instructions to create an Armis API secret key.\n 1. Log into your Armis instance\n 2. Navigate to Settings -> API Management\n 3. If the secret key has not already been created, press the Create button to create the secret key\n 4. To access the secret key, press the Show button\n 5. The secret key can now be copied and used during the Armis Alerts Activities connector configuration""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. 
Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Armis Alerts Activities Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Armis Alerts Activities Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Armis Alerts Activities Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. 
Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 5 - Create a Keyvault**\n\n Follow these instructions to create a new Keyvault.\n 1. In the Azure portal, Go to **Key vaults**. Click create.\n 2. Select Subscription, Resource Group and provide unique name of keyvault.\n\n> **NOTE:** Create a separate key vault for each **API key** within one workspace.""}, {""title"": """", ""description"": ""**STEP 6 - Create Access Policy in Keyvault**\n\n Follow these instructions to create access policy in Keyvault.\n 1. Go to keyvaults, select your keyvault, go to Access policies on left side panel. Click create.\n 2. Select all keys & secrets permissions. Click next.\n 3. In the principal section, search by application name which was generated in STEP - 2. 
Click next.\n\n> **NOTE:** Ensure the Permission model in the Access Configuration of Key Vault is set to **'Vault access policy'**""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Armis Alerts Activities data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Armis API Authorization Key(s)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Armis connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ArmisAlertsActivitiesAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-ArmisAlertsActivitiesAPI-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tArmis Secret Key \n\t\tArmis URL (https://.armis.com/api/v1/) \n\t\tArmis Alert Table Name \n\t\tArmis Activity Table Name \n\t\tSeverity (Default: Low) \n\t\tArmis Schedule \n\t\tKeyVault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Armis Alerts Activities data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-ArmisAlertsActivitiesAPI311-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ARMISXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tArmis Secret Key \n\t\tArmis URL (https://.armis.com/api/v1/) \n\t\tArmis Alert Table Name \n\t\tArmis Activity Table Name \n\t\tSeverity (Default: Low) \n\t\tArmis Schedule \n\t\tKeyVault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Armis Secret Key** is required. See the documentation to learn more about API on the `https://.armis.com/api/v1/doc`""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis/Data%20Connectors/ArmisAlertsActivities/ArmisAlertsActivities_API_FunctionApp.json","true" +"Armis_Alerts_CL","Armis","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis","armisinc1668090987837","armis-solution","2022-08-02","2024-08-23","","Armis Corporation","Partner","https://support.armis.com/","","domains","ArmisAlertsActivities","Armis","Armis Alerts Activities","The [Armis](https://www.armis.com/) Alerts Activities connector gives the capability to ingest Armis Alerts and Activities into Microsoft Sentinel through the Armis REST API. Refer to the API documentation: `https://.armis.com/api/v1/docs` for more information. The connector provides the ability to get alert and activity information from the Armis platform and to identify and prioritize threats in your environment. Armis uses your existing infrastructure to discover and identify devices without having to deploy any agents. ","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Armis API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias ArmisActivities/ArmisAlerts and load the function code. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Armis API**\n\n Follow these instructions to create an Armis API secret key.\n 1. Log into your Armis instance\n 2. Navigate to Settings -> API Management\n 3. If the secret key has not already been created, press the Create button to create the secret key\n 4. To access the secret key, press the Show button\n 5. The secret key can now be copied and used during the Armis Alerts Activities connector configuration""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. 
Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Armis Alerts Activities Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Armis Alerts Activities Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Armis Alerts Activities Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. 
Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 5 - Create a Keyvault**\n\n Follow these instructions to create a new Keyvault.\n 1. In the Azure portal, Go to **Key vaults**. Click create.\n 2. Select Subscription, Resource Group and provide unique name of keyvault.\n\n> **NOTE:** Create a separate key vault for each **API key** within one workspace.""}, {""title"": """", ""description"": ""**STEP 6 - Create Access Policy in Keyvault**\n\n Follow these instructions to create access policy in Keyvault.\n 1. Go to keyvaults, select your keyvault, go to Access policies on left side panel. Click create.\n 2. Select all keys & secrets permissions. Click next.\n 3. In the principal section, search by application name which was generated in STEP - 2. 
Click next.\n\n> **NOTE:** Ensure the Permission model in the Access Configuration of Key Vault is set to **'Vault access policy'**""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Armis Alerts Activities data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Armis API Authorization Key(s)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Armis connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ArmisAlertsActivitiesAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-ArmisAlertsActivitiesAPI-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tArmis Secret Key \n\t\tArmis URL (https://.armis.com/api/v1/) \n\t\tArmis Alert Table Name \n\t\tArmis Activity Table Name \n\t\tSeverity (Default: Low) \n\t\tArmis Schedule \n\t\tKeyVault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Armis Alerts Activities data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-ArmisAlertsActivitiesAPI311-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ARMISXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tArmis Secret Key \n\t\tArmis URL (https://.armis.com/api/v1/) \n\t\tArmis Alert Table Name \n\t\tArmis Activity Table Name \n\t\tSeverity (Default: Low) \n\t\tArmis Schedule \n\t\tKeyVault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Armis Secret Key** is required. See the documentation to learn more about API on the `https://.armis.com/api/v1/doc`""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis/Data%20Connectors/ArmisAlertsActivities/ArmisAlertsActivities_API_FunctionApp.json","true" +"Armis_Devices_CL","Armis","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis","armisinc1668090987837","armis-solution","2022-08-02","2024-08-23","","Armis Corporation","Partner","https://support.armis.com/","","domains","ArmisDevices","Armis","Armis Devices","The [Armis](https://www.armis.com/) Device connector gives the capability to ingest Armis Devices into Microsoft Sentinel through the Armis REST API. Refer to the API documentation: `https://.armis.com/api/v1/docs` for more information. The connector provides the ability to get device information from the Armis platform. Armis uses your existing infrastructure to discover and identify devices without having to deploy any agents. Armis can also integrate with your existing IT & security management tools to identify and classify each and every device, managed or unmanaged in your environment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Armis API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-ArmisDevice-parser) to create the Kusto functions alias, **ArmisDevice**""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Armis API**\n\n Follow these instructions to create an Armis API secret key.\n 1. Log into your Armis instance\n 2. Navigate to Settings -> API Management\n 3. If the secret key has not already been created, press the Create button to create the secret key\n 4. To access the secret key, press the Show button\n 5. The secret key can now be copied and used during the Armis Device connector configuration""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. 
The client ID and Tenant ID is required as configuration parameters for the execution of Armis Device Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Armis Device Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Armis Device Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. 
Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 5 - Create a Keyvault**\n\n Follow these instructions to create a new Keyvault.\n 1. In the Azure portal, Go to **Key vaults**. Click create.\n 2. Select Subscription, Resource Group and provide unique name of keyvault.\n\n> **NOTE:** Create a separate key vault for each **API key** within one workspace.""}, {""title"": """", ""description"": ""**STEP 6 - Create Access Policy in Keyvault**\n\n Follow these instructions to create access policy in Keyvault.\n 1. Go to keyvaults, select your keyvault, go to Access policies on left side panel. Click create.\n 2. Select all keys & secrets permissions. Click next.\n 3. In the principal section, search by application name which was generated in STEP - 2. 
Click next.\n\n> **NOTE:** Ensure the Permission model in the Access Configuration of Key Vault is set to **'Vault access policy'**""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Armis Device data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Armis API Authorization Key(s)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Armis connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ArmisDevice-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-ArmisDevice-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tArmis Secret Key \n\t\tArmis URL (https://.armis.com/api/v1/) \n\t\tArmis Device Table Name \n\t\tArmis Schedule \n\t\tKeyVault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Armis Device data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. 
Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-ArmisDevice311-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ARMISXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. 
In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tArmis Secret Key \n\t\tArmis URL (https://.armis.com/api/v1/) \n\t\tArmis Device Table Name \n\t\tArmis Schedule \n\t\tKeyVault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Armis Secret Key** is required. 
See the documentation to learn more about API on the `https://.armis.com/api/v1/doc`""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armis/Data%20Connectors/ArmisDevice/ArmisDevice_API_FunctionApp.json","true" +"Armorblox_CL","Armorblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armorblox","armorblox1601081599926","armorblox_sentinel_1","2021-10-18","","","Armorblox","Partner","https://www.armorblox.com/contact/","","domains","Armorblox","Armorblox","Armorblox","The [Armorblox](https://www.armorblox.com/) data connector provides the capability to ingest incidents from your Armorblox instance into Microsoft Sentinel through the REST API. The connector provides ability to get events which helps to examine potential security risks, and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Armorblox API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Armorblox API**\n\n Follow the instructions to obtain the API token.\n\n1. Log in to the Armorblox portal with your credentials.\n2. In the portal, click **Settings**.\n3. In the **Settings** view, click **API Keys**\n4. Click **Create API Key**.\n5. Enter the required information.\n6. Click **Create**, and copy the API token displayed in the modal.\n7. 
Save API token for using in the data connector.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Armorblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Armorblox data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-armorblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **ArmorbloxAPIToken**, **ArmorbloxInstanceURL** OR **ArmorbloxInstanceName**, and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Armorblox data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-armorblox-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. Armorblox).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tArmorbloxAPIToken\n\t\tArmorbloxInstanceName OR ArmorbloxInstanceURL\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tLogAnalyticsUri (optional)\n> - Use LogAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Armorblox Instance Details"", ""description"": ""**ArmorbloxInstanceName** OR **ArmorbloxInstanceURL** is required""}, {""name"": ""Armorblox API Credentials"", ""description"": ""**ArmorbloxAPIToken** is required""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Armorblox/Data%20Connectors/Armorblox_API_FunctionApp.json","true" +"CommonSecurityLog","Aruba ClearPass","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Aruba%20ClearPass","azuresentinel","azure-sentinel-solution-arubaclearpass","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ArubaClearPass","Aruba Networks","[Deprecated] Aruba ClearPass via Legacy Agent","The [Aruba ClearPass](https://www.arubanetworks.com/products/security/network-access-control/secure-access/) connector allows you to easily connect your Aruba ClearPass with Microsoft Sentinel, to create custom dashboards, alerts, and improve investigation. This gives you more insight into your organization’s network and improves your security operation capabilities.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias ArubaClearPass and load the function code or click [here](https://aka.ms/sentinel-arubaclearpass-parser).The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Aruba ClearPass logs to a Syslog agent"", ""description"": ""Configure Aruba ClearPass to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent.\n1. [Follow these instructions](https://www.arubanetworks.com/techdocs/ClearPass/6.7/PolicyManager/Content/CPPM_UserGuide/Admin/syslogExportFilters_add_syslog_filter_general.htm) to configure the Aruba ClearPass to forward syslog.\n2. 
Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Aruba%20ClearPass/Data%20Connectors/Connector_Syslog_ArubaClearPass.json","true" +"CommonSecurityLog","Aruba ClearPass","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Aruba%20ClearPass","azuresentinel","azure-sentinel-solution-arubaclearpass","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ArubaClearPassAma","Aruba Networks","[Deprecated] Aruba ClearPass via AMA","The [Aruba ClearPass](https://www.arubanetworks.com/products/security/network-access-control/secure-access/) connector allows you to easily connect your Aruba ClearPass with Microsoft Sentinel, to create custom dashboards, alerts, and improve investigation. This gives you more insight into your organization’s network and improves your security operation capabilities.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias ArubaClearPass and load the function code or click [here](https://aka.ms/sentinel-arubaclearpass-parser).The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. 
Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward Aruba ClearPass logs to a Syslog agent"", ""description"": ""Configure Aruba ClearPass to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent.\n1. [Follow these instructions](https://www.arubanetworks.com/techdocs/ClearPass/6.7/PolicyManager/Content/CPPM_UserGuide/Admin/syslogExportFilters_add_syslog_filter_general.htm) to configure the Aruba ClearPass to forward syslog.\n2. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address."", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Aruba%20ClearPass/Data%20Connectors/template_ArubaClearPassAMA.json","true" +"AtlassianConfluenceNativePoller_CL","AtlassianConfluenceAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianConfluenceAudit","azuresentinel","azure-sentinel-solution-atlassianconfluenceaudit","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","AtlassianConfluence","Atlassian","Atlassian Confluence","The Atlassian Confluence data connector provides the capability to ingest [Atlassian Confluence audit logs](https://developer.atlassian.com/cloud/confluence/rest/api-group-audit/) into Microsoft Sentinel.","[{""title"": ""Connect Atlassian Confluence"", ""description"": ""Please insert your credentials"", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Domain Name"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{domain}}"", ""placeHolderValue"": """"}]}, ""type"": ""BasicAuth""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Atlassian Confluence API credentials"", ""description"": ""Confluence Username and Confluence Access Token are required. [See the documentation to learn more about Atlassian Confluence API](https://developer.atlassian.com/cloud/confluence/rest/intro/). Confluence domain must be provided as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianConfluenceAudit/Data%20Connectors/ConfluenceNativePollerConnector/azuredeploy_Confluence_native_poller_connector.json","true" +"Confluence_Audit_CL","AtlassianConfluenceAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianConfluenceAudit","azuresentinel","azure-sentinel-solution-atlassianconfluenceaudit","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ConfluenceAuditAPI","Atlassian","[Deprecated] Atlassian Confluence Audit","The [Atlassian Confluence](https://www.atlassian.com/software/confluence) Audit data connector provides the capability to ingest [Confluence Audit Records](https://support.atlassian.com/confluence-cloud/docs/view-the-audit-log/) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Confluence REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Confluence API**\n\n [Follow the instructions](https://developer.atlassian.com/cloud/confluence/rest/intro/#auth) to obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Confluence Audit data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-confluenceaudit-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-confluenceaudit-azuredeploy-gov)\n2. 
Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **ConfluenceAccessToken**, **ConfluenceUsername**, **ConfluenceHomeSiteName** (short site name part, as example HOMESITENAME from https://community.atlassian.com) and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Confluence Audit data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-confluenceauditapi-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. 
Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ConflAuditXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tConfluenceUsername\n\t\tConfluenceAccessToken\n\t\tConfluenceHomeSiteName\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**ConfluenceAccessToken**, **ConfluenceUsername** is required for REST API. [See the documentation to learn more about API](https://developer.atlassian.com/cloud/confluence/rest/api-group-audit/). 
Check all [requirements and follow the instructions](https://developer.atlassian.com/cloud/confluence/rest/intro/#auth) for obtaining credentials.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianConfluenceAudit/Data%20Connectors/AtlassianConfluenceAuditDataConnector/ConfluenceAudit_API_FunctionApp.json","true" +"ConfluenceAuditLogs_CL","AtlassianConfluenceAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianConfluenceAudit","azuresentinel","azure-sentinel-solution-atlassianconfluenceaudit","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ConfluenceAuditCCPDefinition","Microsoft"," Atlassian Confluence Audit (via Codeless Connector Framework)","The [Atlassian Confluence](https://www.atlassian.com/software/confluence) Audit data connector provides the capability to ingest [Confluence Audit Records](https://support.atlassian.com/confluence-cloud/docs/view-the-audit-log/) events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://support.atlassian.com/confluence-cloud/docs/view-the-audit-log/) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""description"": ""To enable the Atlassian Confluence connector for Microsoft Sentinel, click to add an organization, fill the form with the Confluence environment credentials and click to Connect. 
\n Follow [these steps](https://support.atlassian.com/atlassian-account/docs/manage-api-tokens-for-your-atlassian-account/) to create an API token.\n "", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Atlassian Confluence organization URL"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add organization"", ""title"": ""Add organization"", ""subtitle"": ""Add Atlassian Confluence organization"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Atlassian Confluence organization URL"", ""placeholder"": "".atlassian.net"", ""type"": ""string"", ""name"": ""confluenceorganizationurl""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""User Name"", ""placeholder"": ""User Name (e.g., user@example.com)"", ""type"": ""securestring"", ""name"": ""userid""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""apikey""}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Atlassian Confluence API access"", ""description"": ""Permission of [Administer Confluence](https://developer.atlassian.com/cloud/confluence/rest/v1/intro/#auth) is required to get access to the Confluence Audit logs API. 
See [Confluence API documentation](https://developer.atlassian.com/cloud/confluence/rest/v1/api-group-audit/#api-wiki-rest-api-audit-get) to learn more about the audit API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianConfluenceAudit/Data%20Connectors/AtlassianConfluenceAuditLogs_CCP/AtlassianConfluenceAudit_DataConnectorDefinition.json","true" +"Jira_Audit_CL","AtlassianJiraAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianJiraAudit","azuresentinel","azure-sentinel-solution-atlassianjiraaudit","2022-01-10","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","JiraAuditAPI","Atlassian","Atlassian Jira Audit","The [Atlassian Jira](https://www.atlassian.com/software/jira) Audit data connector provides the capability to ingest [Jira Audit Records](https://support.atlassian.com/jira-cloud-administration/docs/audit-activities-in-jira-applications/) events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-audit-records/) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Jira REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-jiraauditapi-parser) to create the Kusto functions alias, **JiraAudit**""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Jira API**\n\n [Follow the instructions](https://developer.atlassian.com/cloud/jira/platform/rest/v3/intro/#authentication) to obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Jira Audit data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentineljiraauditazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentineljiraauditazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. 
Enter the **JiraAccessToken**, **JiraUsername**, **JiraHomeSiteName** (short site name part, as example HOMESITENAME from https://community.atlassian.com) and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Jira Audit data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-jiraauditapi-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. JiraAuditXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. 
Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tJiraUsername\n\t\tJiraAccessToken\n\t\tJiraHomeSiteName\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**JiraAccessToken**, **JiraUsername** is required for REST API. [See the documentation to learn more about API](https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-audit-records/). Check all [requirements and follow the instructions](https://developer.atlassian.com/cloud/jira/platform/rest/v3/intro/#authentication) for obtaining credentials.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianJiraAudit/Data%20Connectors/JiraAudit_API_FunctionApp.json","true" +"Jira_Audit_v2_CL","AtlassianJiraAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianJiraAudit","azuresentinel","azure-sentinel-solution-atlassianjiraaudit","2022-01-10","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","JiraAuditCCPDefinition","Microsoft","Atlassian Jira Audit (using REST API)","The [Atlassian Jira](https://www.atlassian.com/software/jira) Audit data connector provides the capability to ingest [Jira Audit Records](https://support.atlassian.com/jira-cloud-administration/docs/audit-activities-in-jira-applications/) events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-audit-records/) for more information. 
The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""description"": ""To enable the Atlassian Jira connector for Microsoft Sentinel, click to add an organization, fill the form with the Jira environment credentials and click to Connect. \n Follow [these steps](https://support.atlassian.com/atlassian-account/docs/manage-api-tokens-for-your-atlassian-account/) to create an API token.\n "", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Atlassian Jira organization URL"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add organization"", ""title"": ""Add organization"", ""subtitle"": ""Add Atlassian Jira organization"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Atlassian Jira organization URL"", ""placeholder"": ""Atlassian Jira organization URL"", ""type"": ""string"", ""name"": ""jiraorganizationurl""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""User Name"", ""placeholder"": ""User Name (e.g., user@example.com)"", ""type"": ""securestring"", ""name"": ""userid""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""API Key"", ""type"": ""password"", ""name"": ""apikey""}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Atlassian Jira API access"", ""description"": ""Permission of [Administer 
Jira](https://developer.atlassian.com/cloud/jira/platform/rest/v3/intro/#authentication) is required to get access to the Jira Audit logs API. See [Jira API documentation](https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-audit-records/#api-group-audit-records) to learn more about the audit API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AtlassianJiraAudit/Data%20Connectors/JiraAuditAPISentinelConnector_ccpv2/JiraAudit_DataConnectorDefinition.json","true" +"","Attacker Tools Threat Protection Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Attacker%20Tools%20Threat%20Protection%20Essentials","azuresentinel","azure-sentinel-solution-attackertools","2022-11-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","Australian Cyber Security Centre","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Australian%20Cyber%20Security%20Centre","azuresentinel","azure-sentinel-solution-australiancybersecurity","2022-11-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"Auth0AM_CL","Auth0","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Auth0","azuresentinel","azure-sentinel-solution-auth0","2022-08-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Auth0","Auth0","Auth0 Access Management","The [Auth0 Access Management](https://auth0.com/access-management) data connector provides the capability to ingest [Auth0 log events](https://auth0.com/docs/api/management/v2/#!/Logs/get_logs) into Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Auth0 Management APIs to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Auth0 Management API**\n\n Follow the instructions to obtain the credentials.\n\n1. In Auth0 Dashboard, go to **Applications > Applications**.\n2. Select your Application. This should be a \""Machine-to-Machine\"" Application configured with at least **read:logs** and **read:logs_users** permissions.\n3. Copy **Domain, ClientID, Client Secret**""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Auth0 Access Management data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Auth0 Access Management data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-Auth0AccessManagement-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. 
\n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the ****Domain, ClientID, Client Secret****, **AzureSentinelWorkspaceId**, **AzureSentinelSharedKey**. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Auth0 Access Management data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-Auth0AccessManagement-azuredeploy) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. 
The name you type is validated to make sure that it's unique in Azure Functions. (e.g. Auth0AMXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tDOMAIN\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**API token** is required. [See the documentation to learn more about API token](https://auth0.com/docs/secure/tokens/access-tokens/get-management-api-access-tokens-for-production)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Auth0/Data%20Connectors/Auth0_FunctionApp.json","true" +"Auth0Logs_CL","Auth0","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Auth0","azuresentinel","azure-sentinel-solution-auth0","2022-08-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Auth0ConnectorCCPDefinition","Microsoft","Auth0 Logs","The [Auth0](https://auth0.com/docs/api/management/v2/logs/get-logs) data connector allows ingesting logs from Auth0 API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses Auth0 API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### STEP 1 - Configuration steps for the Auth0 Management API""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Follow the instructions to obtain the credentials. \n 1. In Auth0 Dashboard, go to [**Applications > Applications**]\n 2. Select your Application. This should be a [**Machine-to-Machine**] Application configured with at least [**read:logs**] and [**read:logs_users**] permissions. \n 3. Copy [**Domain, ClientID, Client Secret**]""}}, {""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://example.auth0.com"", ""type"": ""text"", ""name"": ""Domain""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""Client ID"", ""placeholder"": ""Client ID"", ""type"": ""text"", ""name"": ""ClientId""}, ""type"": ""Textbox""}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client Secret"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""ClientSecret""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Auth0/Data%20Connectors/Auth0_CCP/DataConnectorDefinition.json","true" 
+"Authomize_v2_CL","Authomize","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Authomize","authomize","azure-sentinel-solution-authomize","2023-06-15","","","Authomize","Partner","https://support.authomize.com","","domains,verticals","Authomize","Authomize","Authomize Data Connector","The Authomize Data Connector provides the capability to ingest custom log types from Authomize into Microsoft Sentinel.","[{""title"": ""1. Locate your Authomize API key"", ""description"": ""Follow the setup instructions [located under Data Connectors for Authomize](https://github.com/authomize/Open-ITDR/blob/main/Open-Connectors/Platform/Azure-Sentinel/Data%20Connectors/readme.md).""}, {""title"": ""2. Deploy the Authomize data connector using the setup instructions."", ""description"": ""Follow the Instructions on [deploying the data connector to ingest data from Authomize](https://github.com/authomize/Open-ITDR/blob/main/Open-Connectors/Platform/Azure-Sentinel/Data%20Connectors/readme.md).""}, {""title"": ""3. Finalize your setup"", ""description"": ""Validate that your script is running. Simple instructions are located under the [Authomize Data Connector area](https://github.com/authomize/Open-ITDR/blob/main/Open-Connectors/Platform/Azure-Sentinel/Data%20Connectors/readme.md).""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Include custom pre-requisites if the connectivity requires - else delete customs"", ""description"": ""Description for any custom pre-requisite""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Authomize/Data%20Connectors/AuthomizeCustomConnector.json","true" +"AzureActivity","Azure Activity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Activity","azuresentinel","azure-sentinel-solution-azureactivity","2022-04-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureActivity","Microsoft","Azure Activity","Azure Activity Log is a subscription log that provides insight into subscription-level events that occur in Azure, including events from Azure Resource Manager operational data, service health events, write operations taken on the resources in your subscription, and the status of activities performed in Azure. For more information, see the [Microsoft Sentinel documentation ](https://go.microsoft.com/fwlink/p/?linkid=2219695&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""instructions"": [{""parameters"": {""text"": ""This connector has been updated to use the diagnostics settings back-end pipeline. which provides increased functionality and better consistency with resource logs.\nConnectors using this pipeline can also be governed at scale by Azure Policy. Learn more about the new Azure Activity connector.\nFollow the instructions below to upgrade your connector to the diagnostics settings pipeline."", ""visible"": true, ""inline"": false}, ""type"": ""InfoMessage""}]}, {""title"": ""1. 
Disconnect your subscriptions from the legacy method"", ""description"": ""The subscriptions listed below are still using the older, legacy method. You are strongly encouraged to upgrade to the new pipeline.
\nTo do this, click on the 'Disconnect All' button below, before proceeding to launch the Azure Policy Assignment wizard."", ""instructions"": [{""parameters"": {""datasourceName"": ""AzureActivityLog""}, ""type"": ""OmsDatasource""}]}, {""title"": ""2. Connect your subscriptions through diagnostic settings new pipeline"", ""description"": ""This connector uses Azure Policy to apply a single Azure Subscription log-streaming configuration to a collection of subscriptions, defined as a scope.\nFollow the instructions below to create and apply a policy to all current and future subscriptions. **Note**, you may already have an active policy for this resource type."", ""innerSteps"": [{""title"": ""Launch the Azure Policy Assignment wizard and follow the steps.\u200b"", ""description"": "">1. In the **Basics** tab, click the button with the three dots under **Scope** to select your resources assignment scope.\n >2. In the **Parameters** tab, choose your Microsoft Sentinel workspace from the **Log Analytics workspace** drop-down list, and leave marked as \""True\"" all the log and metric types you want to ingest.\n >3. 
To apply the policy on your existing resources, select the **Remediation tab** and mark the **Create a remediation task** checkbox."", ""instructions"": [{""parameters"": {""linkType"": ""OpenPolicyAssignment"", ""policyDefinitionGuid"": ""2465583e-4e78-4c15-b6be-a36cbc7c8b0f"", ""assignMode"": 1}, ""type"": ""InstallAgent""}]}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Policy\u200b"", ""description"": ""owner role assigned for each policy assignment scope.\u200b""}, {""name"": ""Subscription"", ""description"": ""owner role permission on the relevant subscription""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Activity/Data%20Connectors/AzureActivity.json","true" +"","Azure Batch Account","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Batch%20Account","azuresentinel","azure-sentinel-solution-batchaccount","2022-06-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"fluentbit_CL","Azure Cloud NGFW by Palo Alto Networks","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Cloud%20NGFW%20by%20Palo%20Alto%20Networks","paloaltonetworks","cloudngfw-sentinel-solution","2023-11-03","2023-11-03","","Palo Alto Networks","Partner","https://support.paloaltonetworks.com","","domains","AzureCloudNGFWByPaloAltoNetworks","Palo Alto Networks","Azure CloudNGFW By Palo Alto Networks","Cloud Next-Generation Firewall by Palo Alto Networks - an Azure Native ISV Service - is Palo Alto Networks Next-Generation Firewall (NGFW) delivered as a cloud-native service on Azure. 
You can discover Cloud NGFW in the Azure Marketplace and consume it in your Azure Virtual Networks (VNet). With Cloud NGFW, you can access the core NGFW capabilities such as App-ID, URL filtering based technologies. It provides threat prevention and detection through cloud-delivered security services and threat prevention signatures. The connector allows you to easily connect your Cloud NGFW logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities. For more information, see the [Cloud NGFW for Azure documentation](https://docs.paloaltonetworks.com/cloud-ngfw/azure).","[{""title"": ""Connect Cloud NGFW by Palo Alto Networks to Microsoft Sentinel"", ""description"": ""Enable Log Settings on All Cloud NGFWs by Palo Alto Networks."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCloudNGFW""}, ""type"": ""ConfigureLogSettings""}]}, {""title"": """", ""description"": ""Inside your Cloud NGFW resource:\n\n1. Navigate to the **Log Settings** from the homepage.\n2. Ensure the **Enable Log Settings** checkbox is checked.\n3. From the **Log Settings** drop-down, choose the desired Log Analytics Workspace.\n4. Confirm your selections and configurations.\n5. Click **Save** to apply the settings.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Cloud%20NGFW%20by%20Palo%20Alto%20Networks/Data%20Connectors/CloudNgfwByPAN.json","true" +"","Azure Cognitive Search","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Cognitive%20Search","azuresentinel","azure-sentinel-solution-azurecognitivesearch","2022-06-28","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","Azure DDoS Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20DDoS%20Protection","azuresentinel","azure-sentinel-solution-azureddosprotection","2022-05-13","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","Azure Data Lake Storage Gen1","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Data%20Lake%20Storage%20Gen1","azuresentinel","azure-sentinel-solution-datalakestoragegen1","2022-06-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"","Azure Event Hubs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Event%20Hubs","azuresentinel","azure-sentinel-solution-eventhub","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","Azure Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Firewall","sentinel4azurefirewall","sentinel4azurefirewall","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"","Azure Key 
Vault","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Key%20Vault","azuresentinel","azure-sentinel-solution-azurekeyvault","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","Azure Logic Apps","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Logic%20Apps","azuresentinel","azure-sentinel-solution-logicapps","2022-06-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","Azure Network Security Groups","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Network%20Security%20Groups","azuresentinel","azure-sentinel-solution-networksecuritygroup","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","Azure SQL Database solution for sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20SQL%20Database%20solution%20for%20sentinel","sentinel4sql","sentinel4sql","2022-08-19","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"","Azure Service Bus","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Service%20Bus","azuresentinel","azure-sentinel-solution-servicebus","2022-06-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","Azure Storage","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Storage","azuresentinel","azure-sentinel-solution-azurestorageaccount","2022-05-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","Azure Stream 
Analytics","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Stream%20Analytics","azuresentinel","azure-sentinel-solution-streamanalytics","2022-06-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","Azure Web Application Firewall (WAF)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20Web%20Application%20Firewall%20%28WAF%29","azuresentinel","azure-sentinel-solution-azurewebapplicationfirewal","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"","Azure kubernetes Service","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Azure%20kubernetes%20Service","azuresentinel","azure-sentinel-solution-azurekubernetes","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"ADOAuditLogs_CL","AzureDevOpsAuditing","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AzureDevOpsAuditing","azuresentinel","azure-sentinel-solution-azuredevopsauditing","2022-09-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","AzureDevOpsAuditLogs","Microsoft","Azure DevOps Audit Logs (via Codeless Connector Platform)","The Azure DevOps Audit Logs data connector allows you to ingest audit events from Azure DevOps into Microsoft Sentinel. This data connector is built using the Microsoft Sentinel Codeless Connector Platform, ensuring seamless integration. It leverages the Azure DevOps Audit Logs API to fetch detailed audit events and supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview). These transformations enable parsing of the received audit data into a custom table during ingestion, improving query performance by eliminating the need for additional parsing. 
By using this connector, you can gain enhanced visibility into your Azure DevOps environment and streamline your security operations.","[{""title"": ""Connect to Azure DevOps to start collecting Audit logs in Microsoft Sentinel.\n"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""\n1. Enter the App you have registered.\n 2. In the 'Overview' section, copy the Application (client) ID.\n 3. Select the 'Endpoints' button, and copy the 'OAuth 2.0 authorization endpoint (v2)' value and the 'OAuth 2.0 token endpoint (v2)' value.\n 4. In the 'Certificates & secrets' section, copy the 'Client Secret value', and store it securely.\n5. Provide the required information below and click 'Connect'.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Token Endpoint"", ""name"": ""tokenEndpoint"", ""placeholder"": ""https://login.microsoftonline.com/{TenantId}/oauth2/v2.0/token"", ""type"": ""text"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Authorization Endpoint"", ""name"": ""authorizationEndpoint"", ""placeholder"": ""https://login.microsoftonline.com/{TenantId}/oauth2/v2.0/authorize"", ""type"": ""text"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Endpoint"", ""name"": ""apiEndpoint"", ""placeholder"": ""https://auditservice.dev.azure.com/{organizationName}/_apis/audit/auditlog?api-version=7.2-preview"", ""type"": ""text"", ""validations"": {""required"": true}}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""App Client ID"", ""clientSecretLabel"": ""App Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": 
true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure DevOps Prerequisite"", ""description"": ""Please ensure the following:
1. Register an Entra App in Microsoft Entra Admin Center under App Registrations.
2. In 'API permissions' - add Permissions to 'Azure DevOps - vso.auditlog'.
3. In 'Certificates & secrets' - generate 'Client secret'.
4. In 'Authentication' - add Redirect URI: 'https://portal.azure.com/TokenAuthorize/ExtensionName/Microsoft_Azure_Security_Insights'.
5. In the Azure DevOps settings - enable audit log and set **View audit log** for the user. [Azure DevOps Auditing](https://learn.microsoft.com/en-us/azure/devops/organizations/audit/azure-devops-auditing?view=azure-devops&tabs=preview-page).
6. Ensure the user assigned to connect the data connector has the View audit logs permission explicitly set to Allow at all times. This permission is essential for successful log ingestion. If the permission is revoked or not granted, data ingestion will fail or be interrupted.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AzureDevOpsAuditing/Data%20Connectors/AzureDevOpsAuditLogs_CCP/AzureDevOpsAuditLogs_DataConnectorDefinition.json","true" +"","AzureSecurityBenchmark","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/AzureSecurityBenchmark","azuresentinel","azure-sentinel-solution-azuresecuritybenchmark","2022-06-17","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"BetterMTDAppLog_CL","BETTER Mobile Threat Defense (MTD)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BETTER%20Mobile%20Threat%20Defense%20%28MTD%29","bettermobilesecurityinc","better_mtd_mss","2022-05-02","","","Better Mobile Security Inc.","Partner","https://www.better.mobi/about#contact-us","","domains","BetterMTD","BETTER Mobile","BETTER Mobile Threat Defense (MTD)","The BETTER MTD Connector allows Enterprises to connect their Better MTD instances with Microsoft Sentinel, to view their data in Dashboards, create custom alerts, use it to trigger playbooks and expands threat hunting capabilities. This gives users more insight into their organization's mobile devices and ability to quickly analyze current mobile security posture which improves their overall SecOps capabilities.","[{""title"": """", ""description"": ""1. In **Better MTD Console**, click on **Integration** on the side bar.\n2. Select **Others** tab.\n3. Click the **ADD ACCOUNT** button and Select **Microsoft Sentinel** from the available integrations.\n4. 
Create the Integration:\n - set `ACCOUNT NAME` to a descriptive name that identifies the integration then click **Next**\n - Enter your `WORKSPACE ID` and `PRIMARY KEY` from the fields below, click **Save**\n - Click **Done**\n5. Threat Policy setup (Which Incidents should be reported to `Microsoft Sentinel`):\n - In **Better MTD Console**, click on **Policies** on the side bar\n - Click on the **Edit** button of the Policy that you are using.\n - For each Incident types that you want to be logged go to **Send to Integrations** field and select **Sentinel**\n6. For additional information, please refer to our [Documentation](https://mtd-docs.bmobi.net/integrations/how-to-setup-azure-sentinel-integration#mtd-integration-configuration)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BETTER%20Mobile%20Threat%20Defense%20%28MTD%29/Data%20Connectors/BETTERMTD.json","true" +"BetterMTDDeviceLog_CL","BETTER Mobile Threat Defense (MTD)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BETTER%20Mobile%20Threat%20Defense%20%28MTD%29","bettermobilesecurityinc","better_mtd_mss","2022-05-02","","","Better Mobile Security Inc.","Partner","https://www.better.mobi/about#contact-us","","domains","BetterMTD","BETTER Mobile","BETTER Mobile Threat Defense (MTD)","The BETTER MTD Connector allows Enterprises to connect their Better MTD instances with Microsoft Sentinel, to view their data in Dashboards, create custom alerts, use it to trigger playbooks and expands threat hunting capabilities. This gives users more insight into their organization's mobile devices and ability to quickly analyze current mobile security posture which improves their overall SecOps capabilities.","[{""title"": """", ""description"": ""1. In **Better MTD Console**, click on **Integration** on the side bar.\n2. Select **Others** tab.\n3. Click the **ADD ACCOUNT** button and Select **Microsoft Sentinel** from the available integrations.\n4. Create the Integration:\n - set `ACCOUNT NAME` to a descriptive name that identifies the integration then click **Next**\n - Enter your `WORKSPACE ID` and `PRIMARY KEY` from the fields below, click **Save**\n - Click **Done**\n5. 
Threat Policy setup (Which Incidents should be reported to `Microsoft Sentinel`):\n - In **Better MTD Console**, click on **Policies** on the side bar\n - Click on the **Edit** button of the Policy that you are using.\n - For each Incident types that you want to be logged go to **Send to Integrations** field and select **Sentinel**\n6. For additional information, please refer to our [Documentation](https://mtd-docs.bmobi.net/integrations/how-to-setup-azure-sentinel-integration#mtd-integration-configuration)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BETTER%20Mobile%20Threat%20Defense%20%28MTD%29/Data%20Connectors/BETTERMTD.json","true" +"BetterMTDIncidentLog_CL","BETTER Mobile Threat Defense (MTD)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BETTER%20Mobile%20Threat%20Defense%20%28MTD%29","bettermobilesecurityinc","better_mtd_mss","2022-05-02","","","Better Mobile Security Inc.","Partner","https://www.better.mobi/about#contact-us","","domains","BetterMTD","BETTER Mobile","BETTER Mobile Threat Defense (MTD)","The BETTER MTD Connector allows Enterprises to connect their Better MTD instances with Microsoft Sentinel, to view their data in Dashboards, create custom alerts, use it to trigger playbooks and expands threat hunting capabilities. This gives users more insight into their organization's mobile devices and ability to quickly analyze current mobile security posture which improves their overall SecOps capabilities.","[{""title"": """", ""description"": ""1. In **Better MTD Console**, click on **Integration** on the side bar.\n2. Select **Others** tab.\n3. Click the **ADD ACCOUNT** button and Select **Microsoft Sentinel** from the available integrations.\n4. Create the Integration:\n - set `ACCOUNT NAME` to a descriptive name that identifies the integration then click **Next**\n - Enter your `WORKSPACE ID` and `PRIMARY KEY` from the fields below, click **Save**\n - Click **Done**\n5. 
Threat Policy setup (Which Incidents should be reported to `Microsoft Sentinel`):\n - In **Better MTD Console**, click on **Policies** on the side bar\n - Click on the **Edit** button of the Policy that you are using.\n - For each Incident types that you want to be logged go to **Send to Integrations** field and select **Sentinel**\n6. For additional information, please refer to our [Documentation](https://mtd-docs.bmobi.net/integrations/how-to-setup-azure-sentinel-integration#mtd-integration-configuration)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BETTER%20Mobile%20Threat%20Defense%20%28MTD%29/Data%20Connectors/BETTERMTD.json","true" +"BetterMTDNetflowLog_CL","BETTER Mobile Threat Defense (MTD)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BETTER%20Mobile%20Threat%20Defense%20%28MTD%29","bettermobilesecurityinc","better_mtd_mss","2022-05-02","","","Better Mobile Security Inc.","Partner","https://www.better.mobi/about#contact-us","","domains","BetterMTD","BETTER Mobile","BETTER Mobile Threat Defense (MTD)","The BETTER MTD Connector allows Enterprises to connect their Better MTD instances with Microsoft Sentinel, to view their data in Dashboards, create custom alerts, use it to trigger playbooks and expands threat hunting capabilities. This gives users more insight into their organization's mobile devices and ability to quickly analyze current mobile security posture which improves their overall SecOps capabilities.","[{""title"": """", ""description"": ""1. In **Better MTD Console**, click on **Integration** on the side bar.\n2. Select **Others** tab.\n3. Click the **ADD ACCOUNT** button and Select **Microsoft Sentinel** from the available integrations.\n4. Create the Integration:\n - set `ACCOUNT NAME` to a descriptive name that identifies the integration then click **Next**\n - Enter your `WORKSPACE ID` and `PRIMARY KEY` from the fields below, click **Save**\n - Click **Done**\n5. 
Threat Policy setup (Which Incidents should be reported to `Microsoft Sentinel`):\n - In **Better MTD Console**, click on **Policies** on the side bar\n - Click on the **Edit** button of the Policy that you are using.\n - For each Incident types that you want to be logged go to **Send to Integrations** field and select **Sentinel**\n6. For additional information, please refer to our [Documentation](https://mtd-docs.bmobi.net/integrations/how-to-setup-azure-sentinel-integration#mtd-integration-configuration)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BETTER%20Mobile%20Threat%20Defense%20%28MTD%29/Data%20Connectors/BETTERMTD.json","true" +"Syslog","Barracuda CloudGen Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Barracuda%20CloudGen%20Firewall","microsoftsentinelcommunity","azure-sentinel-solution-barracudacloudgenfirewall","2021-05-02","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","BarracudaCloudFirewall","Barracuda","[Deprecated] Barracuda CloudGen Firewall","The Barracuda CloudGen Firewall (CGFW) connector allows you to easily connect your Barracuda CGFW logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias CGFWFirewallActivity and load the function code or click [here](https://aka.ms/sentinel-barracudacloudfirewall-parser). The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n2. Select **Apply below configuration to my machines** and select the facilities and severities.\n3. Click **Save**.""}, {""title"": ""Configure and connect the Barracuda CloudGen Firewall"", ""description"": ""[Follow instructions](https://aka.ms/sentinel-barracudacloudfirewall-connector) to configure syslog streaming. 
Use the IP address or hostname for the Linux machine with the Microsoft Sentinel agent installed for the Destination IP address."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Barracuda CloudGen Firewall"", ""description"": ""must be configured to export logs via Syslog""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Barracuda%20CloudGen%20Firewall/Data%20Connectors/template_BarracudaCloudFirewall.json","true" +"Barracuda_CL","Barracuda WAF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Barracuda%20WAF","barracudanetworks","barracuda_web_application_firewall_mss","2022-05-13","","","Barracuda","Partner","https://www.barracuda.com/support","","domains","Barracuda","Barracuda","[Deprecated] Barracuda Web Application Firewall via Legacy Agent","The Barracuda Web Application Firewall (WAF) connector allows you to easily connect your Barracuda logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization’s network and improves your security operation capabilities.

[For more information >​](https://aka.ms/CEF-Barracuda)","[{""title"": ""Configure and connect Barracuda WAF"", ""description"": ""The Barracuda Web Application Firewall can integrate with and export logs directly to Microsoft Sentinel via Azure OMS Server.\u200b\n\n1. Go to [Barracuda WAF configuration](https://aka.ms/asi-barracuda-connector), and follow the instructions, using the parameters below to set up the connection:.\n\n2. Web Firewall logs facility: Go to the advanced settings (link below) for your workspace and on the **Data > Syslog** tabs, make sure that the facility exists.\u200b\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}, {""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Barracuda%20WAF/Data%20Connectors/template_Barracuda.json","true" +"CommonSecurityLog","Barracuda WAF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Barracuda%20WAF","barracudanetworks","barracuda_web_application_firewall_mss","2022-05-13","","","Barracuda","Partner","https://www.barracuda.com/support","","domains","Barracuda","Barracuda","[Deprecated] Barracuda Web Application Firewall via Legacy Agent","The Barracuda Web Application Firewall (WAF) connector allows you to easily connect your Barracuda logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization’s network and improves your security operation capabilities.

[For more information >​](https://aka.ms/CEF-Barracuda)","[{""title"": ""Configure and connect Barracuda WAF"", ""description"": ""The Barracuda Web Application Firewall can integrate with and export logs directly to Microsoft Sentinel via Azure OMS Server.\u200b\n\n1. Go to [Barracuda WAF configuration](https://aka.ms/asi-barracuda-connector), and follow the instructions, using the parameters below to set up the connection:.\n\n2. Web Firewall logs facility: Go to the advanced settings (link below) for your workspace and on the **Data > Syslog** tabs, make sure that the facility exists.\u200b\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}, {""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Barracuda%20WAF/Data%20Connectors/template_Barracuda.json","true" +"barracuda_CL","Barracuda WAF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Barracuda%20WAF","barracudanetworks","barracuda_web_application_firewall_mss","2022-05-13","","","Barracuda","Partner","https://www.barracuda.com/support","","domains","Barracuda","Barracuda","[Deprecated] Barracuda Web Application Firewall via Legacy Agent","The Barracuda Web Application Firewall (WAF) connector allows you to easily connect your Barracuda logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization’s network and improves your security operation capabilities.

[For more information >​](https://aka.ms/CEF-Barracuda)","[{""title"": ""Configure and connect Barracuda WAF"", ""description"": ""The Barracuda Web Application Firewall can integrate with and export logs directly to Microsoft Sentinel via Azure OMS Server.\u200b\n\n1. Go to [Barracuda WAF configuration](https://aka.ms/asi-barracuda-connector), and follow the instructions, using the parameters below to set up the connection:.\n\n2. Web Firewall logs facility: Go to the advanced settings (link below) for your workspace and on the **Data > Syslog** tabs, make sure that the facility exists.\u200b\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}, {""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Barracuda%20WAF/Data%20Connectors/template_Barracuda.json","true" +"beSECURE_Audit_CL","Beyond Security beSECURE","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Beyond%20Security%20beSECURE","azuresentinel","azure-sentinel-solution-isvtesting12","2022-05-02","","","Beyond Security","Partner","https://beyondsecurity.freshdesk.com/support/home","","domains","BeyondSecuritybeSECURE","Beyond Security","Beyond Security beSECURE","The [Beyond Security beSECURE](https://beyondsecurity.com/) connector allows you to easily connect your Beyond Security beSECURE scan events, scan results and audit trail with Azure Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": ""Configure beSECURE"", ""description"": ""Follow the steps below to configure your beSECURE solution to send out scan results, scan status and audit trail to Azure Sentinel."", ""innerSteps"": [{""title"": ""1. Access the Integration menu"", ""description"": ""1.1 Click on the 'More' menu option\n\n1.2 Select Server\n\n1.3 Select Integration\n\n1.4 Enable Azure Sentinel""}, {""title"": ""2. 
Provide Azure Sentinel settings"", ""description"": ""Fill in the Workspace ID and Primary Key values, click 'Modify'"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Beyond%20Security%20beSECURE/Data%20Connectors/Beyond%20Security%20beSECURE.json","true" +"beSECURE_ScanEvent_CL","Beyond Security beSECURE","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Beyond%20Security%20beSECURE","azuresentinel","azure-sentinel-solution-isvtesting12","2022-05-02","","","Beyond Security","Partner","https://beyondsecurity.freshdesk.com/support/home","","domains","BeyondSecuritybeSECURE","Beyond Security","Beyond Security beSECURE","The [Beyond Security beSECURE](https://beyondsecurity.com/) connector allows you to easily connect your Beyond Security beSECURE scan events, scan results and audit trail with Azure Sentinel, to view dashboards, create custom alerts, and improve investigation. 
This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": ""Configure beSECURE"", ""description"": ""Follow the steps below to configure your beSECURE solution to send out scan results, scan status and audit trail to Azure Sentinel."", ""innerSteps"": [{""title"": ""1. Access the Integration menu"", ""description"": ""1.1 Click on the 'More' menu option\n\n1.2 Select Server\n\n1.3 Select Integration\n\n1.4 Enable Azure Sentinel""}, {""title"": ""2. Provide Azure Sentinel settings"", ""description"": ""Fill in the Workspace ID and Primary Key values, click 'Modify'"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Beyond%20Security%20beSECURE/Data%20Connectors/Beyond%20Security%20beSECURE.json","true" +"beSECURE_ScanResults_CL","Beyond Security beSECURE","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Beyond%20Security%20beSECURE","azuresentinel","azure-sentinel-solution-isvtesting12","2022-05-02","","","Beyond Security","Partner","https://beyondsecurity.freshdesk.com/support/home","","domains","BeyondSecuritybeSECURE","Beyond Security","Beyond Security beSECURE","The [Beyond Security beSECURE](https://beyondsecurity.com/) connector allows you to easily connect your Beyond Security beSECURE scan events, scan results and audit trail with Azure Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": ""Configure beSECURE"", ""description"": ""Follow the steps below to configure your beSECURE solution to send out scan results, scan status and audit trail to Azure Sentinel."", ""innerSteps"": [{""title"": ""1. Access the Integration menu"", ""description"": ""1.1 Click on the 'More' menu option\n\n1.2 Select Server\n\n1.3 Select Integration\n\n1.4 Enable Azure Sentinel""}, {""title"": ""2. 
Provide Azure Sentinel settings"", ""description"": ""Fill in the Workspace ID and Primary Key values, click 'Modify'"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Beyond%20Security%20beSECURE/Data%20Connectors/Beyond%20Security%20beSECURE.json","true" +"BigIDDSPMCatalog_CL","BigID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BigID","bigid","azure-sentinel-solution-bigid","2025-10-07","","","BigID","Partner","https://www.bigid.com/support","","domains","BigIDDSPMLogsConnectorDefinition","BigID","BigID DSPM connector","The [BigID DSPM](https://bigid.com/data-security-posture-management/) data connector provides the capability to ingest BigID DSPM cases with affected objects and datasource information into Microsoft Sentinel.","[{""description"": ""Provide your BigID domain name like 'customer.bigid.cloud' and your BigID token. 
Generate a token in the BigID console via Settings -> Access Management -> Users -> Select User and generate a token."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""BigID FQDN"", ""placeholder"": ""BigID FQDN"", ""type"": ""text"", ""name"": ""bigidFqdn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""BigID Token"", ""placeholder"": ""BigID Token"", ""type"": ""password"", ""name"": ""bigidToken"", ""validations"": {""required"": true}}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Connect to BigID DSPM API to start collecting BigID DSPM cases and affected Objects in Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""BigID DSPM API access"", ""description"": ""Access to the BigID DSPM API through a BigID Token is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BigID/Data%20Connectors/BigIDDSPMLogs_ccp/BigIDDSPMLogs_connectorDefinition.json","true" +"BitsightAlerts_data_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the BitSight API to pull its logs into 
Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create/Get Bitsight API Token**\n\n Follow these instructions to get a BitSight API Token.\n 1. For SPM App: Refer to the [User Preference](https://service.bitsight.com/app/spm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 2. For TPRM App: Refer to the [User Preference](https://service.bitsight.com/app/tprm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 3. For Classic BitSight: Go to your [Account](https://service.bitsight.com/settings) page, \n\t\tGo to Settings > Account > API Token.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. 
The client ID and Tenant ID is required as configuration parameters for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of BitSight Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the BitSight data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the BitSight API Token."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the BitSight connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BitSight-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. \n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. 
**Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. 
This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. Once all application settings have been entered, click **Review + create** to deploy..""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the BitSight data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-BitSight310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitSightXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. 
Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. \n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. 
Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. 
**Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""BitSight API Token is required. See the documentation to [learn more](https://help.bitsighttech.com/hc/en-us/articles/115014888388-API-Token-Management) about API Token.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" +"BitsightBreaches_data_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the BitSight API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create/Get Bitsight API Token**\n\n Follow these instructions to get a BitSight API Token.\n 1. 
For SPM App: Refer to the [User Preference](https://service.bitsight.com/app/spm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 2. For TPRM App: Refer to the [User Preference](https://service.bitsight.com/app/tprm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 3. For Classic BitSight: Go to your [Account](https://service.bitsight.com/settings) page, \n\t\tGo to Settings > Account > API Token.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of BitSight Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. 
Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the BitSight data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the BitSight API Token."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the BitSight connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BitSight-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. \n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. 
**Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. 
Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. Once all application settings have been entered, click **Review + create** to deploy..""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the BitSight data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-BitSight310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitSightXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. 
\n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. 
Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""BitSight API Token is required. 
See the documentation to [learn more](https://help.bitsighttech.com/hc/en-us/articles/115014888388-API-Token-Management) about API Token.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" +"BitsightCompany_details_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the BitSight API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create/Get Bitsight API Token**\n\n Follow these instructions to get a BitSight API Token.\n 1. For SPM App: Refer to the [User Preference](https://service.bitsight.com/app/spm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 2. 
For TPRM App: Refer to the [User Preference](https://service.bitsight.com/app/tprm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 3. For Classic BitSight: Go to your [Account](https://service.bitsight.com/settings) page, \n\t\tGo to Settings > Account > API Token.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of BitSight Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. 
*Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the BitSight data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the BitSight API Token."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the BitSight connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BitSight-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. \n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. 
**Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. 
Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. Once all application settings have been entered, click **Review + create** to deploy..""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the BitSight data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-BitSight310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitSightXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. 
\n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. 
Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""BitSight API Token is required. 
See the documentation to [learn more](https://help.bitsighttech.com/hc/en-us/articles/115014888388-API-Token-Management) about API Token.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" +"BitsightCompany_rating_details_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the BitSight API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create/Get Bitsight API Token**\n\n Follow these instructions to get a BitSight API Token.\n 1. For SPM App: Refer to the [User Preference](https://service.bitsight.com/app/spm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 2. 
For TPRM App: Refer to the [User Preference](https://service.bitsight.com/app/tprm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 3. For Classic BitSight: Go to your [Account](https://service.bitsight.com/settings) page, \n\t\tGo to Settings > Account > API Token.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of BitSight Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. 
*Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the BitSight data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the BitSight API Token."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the BitSight connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BitSight-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. \n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. 
**Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. 
Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. Once all application settings have been entered, click **Review + create** to deploy..""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the BitSight data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-BitSight310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitSightXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. 
\n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. 
Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""BitSight API Token is required. 
See the documentation to [learn more](https://help.bitsighttech.com/hc/en-us/articles/115014888388-API-Token-Management) about API Token.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" +"BitsightDiligence_historical_statistics_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the BitSight API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create/Get Bitsight API Token**\n\n Follow these instructions to get a BitSight API Token.\n 1. For SPM App: Refer to the [User Preference](https://service.bitsight.com/app/spm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 2. 
For TPRM App: Refer to the [User Preference](https://service.bitsight.com/app/tprm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 3. For Classic BitSight: Go to your [Account](https://service.bitsight.com/settings) page, \n\t\tGo to Settings > Account > API Token.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of BitSight Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. 
*Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the BitSight data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the BitSight API Token."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the BitSight connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BitSight-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. \n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. 
**Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. 
Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. Once all application settings have been entered, click **Review + create** to deploy..""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the BitSight data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-BitSight310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitSightXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. 
\n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. 
Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""BitSight API Token is required. 
See the documentation to [learn more](https://help.bitsighttech.com/hc/en-us/articles/115014888388-API-Token-Management) about API Token.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" +"BitsightDiligence_statistics_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the BitSight API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create/Get Bitsight API Token**\n\n Follow these instructions to get a BitSight API Token.\n 1. For SPM App: Refer to the [User Preference](https://service.bitsight.com/app/spm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 2. 
For TPRM App: Refer to the [User Preference](https://service.bitsight.com/app/tprm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 3. For Classic BitSight: Go to your [Account](https://service.bitsight.com/settings) page, \n\t\tGo to Settings > Account > API Token.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of BitSight Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. 
*Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the BitSight data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the BitSight API Token."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the BitSight connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BitSight-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. \n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. 
**Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. 
Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. Once all application settings have been entered, click **Review + create** to deploy..""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the BitSight data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-BitSight310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitSightXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. 
\n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. 
Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""BitSight API Token is required. 
See the documentation to [learn more](https://help.bitsighttech.com/hc/en-us/articles/115014888388-API-Token-Management) about API Token.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" +"BitsightFindings_data_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the BitSight API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create/Get Bitsight API Token**\n\n Follow these instructions to get a BitSight API Token.\n 1. For SPM App: Refer to the [User Preference](https://service.bitsight.com/app/spm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 2. 
For TPRM App: Refer to the [User Preference](https://service.bitsight.com/app/tprm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 3. For Classic BitSight: Go to your [Account](https://service.bitsight.com/settings) page, \n\t\tGo to Settings > Account > API Token.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of BitSight Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. 
*Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the BitSight data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the BitSight API Token."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the BitSight connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BitSight-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. \n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. 
**Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. 
Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. Once all application settings have been entered, click **Review + create** to deploy..""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the BitSight data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-BitSight310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitSightXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. 
\n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. 
Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App are required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""BitSight API Token is required. 
See the documentation to [learn more](https://help.bitsighttech.com/hc/en-us/articles/115014888388-API-Token-Management) about API Token.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" +"BitsightFindings_summary_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the BitSight API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create/Get Bitsight API Token**\n\n Follow these instructions to get a BitSight API Token.\n 1. For SPM App: Refer to the [User Preference](https://service.bitsight.com/app/spm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 2. 
For TPRM App: Refer to the [User Preference](https://service.bitsight.com/app/tprm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 3. For Classic BitSight: Go to your [Account](https://service.bitsight.com/settings) page, \n\t\tGo to Settings > Account > API Token.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of BitSight Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. 
*Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the BitSight data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the BitSight API Token."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the BitSight connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BitSight-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. \n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. 
**Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. 
Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. Once all application settings have been entered, click **Review + create** to deploy..""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the BitSight data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-BitSight310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitSightXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. 
\n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. 
Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App are required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""BitSight API Token is required. 
See the documentation to [learn more](https://help.bitsighttech.com/hc/en-us/articles/115014888388-API-Token-Management) about API Token.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" +"BitsightGraph_data_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the BitSight API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create/Get Bitsight API Token**\n\n Follow these instructions to get a BitSight API Token.\n 1. For SPM App: Refer to the [User Preference](https://service.bitsight.com/app/spm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 2. 
For TPRM App: Refer to the [User Preference](https://service.bitsight.com/app/tprm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 3. For Classic BitSight: Go to your [Account](https://service.bitsight.com/settings) page, \n\t\tGo to Settings > Account > API Token.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of BitSight Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. 
*Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the BitSight data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the BitSight API Token."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the BitSight connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BitSight-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. \n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. 
**Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. 
Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. Once all application settings have been entered, click **Review + create** to deploy..""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the BitSight data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-BitSight310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitSightXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. 
\n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. 
Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""BitSight API Token is required. 
See the documentation to [learn more](https://help.bitsighttech.com/hc/en-us/articles/115014888388-API-Token-Management) about API Token.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" +"BitsightIndustrial_statistics_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the BitSight API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create/Get Bitsight API Token**\n\n Follow these instructions to get a BitSight API Token.\n 1. For SPM App: Refer to the [User Preference](https://service.bitsight.com/app/spm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 2. 
For TPRM App: Refer to the [User Preference](https://service.bitsight.com/app/tprm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 3. For Classic BitSight: Go to your [Account](https://service.bitsight.com/settings) page, \n\t\tGo to Settings > Account > API Token.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of BitSight Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. 
*Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the BitSight data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the BitSight API Token."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the BitSight connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BitSight-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. \n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. 
**Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. 
Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. Once all application settings have been entered, click **Review + create** to deploy..""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the BitSight data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-BitSight310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitSightXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. 
\n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. 
Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""BitSight API Token is required. 
See the documentation to [learn more](https://help.bitsighttech.com/hc/en-us/articles/115014888388-API-Token-Management) about API Token.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" +"BitsightObservation_statistics_CL","BitSight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight","bitsighttechnologiesinc1695119434818","bitsight_v1","2023-02-20","2024-02-20","","BitSight Support","Partner","https://www.bitsight.com/customer-success-support","","domains","BitSight","BitSight Technologies, Inc.","Bitsight data connector","The [BitSight](https://www.BitSight.com/) Data Connector supports evidence-based cyber risk monitoring by bringing BitSight data in Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the BitSight API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create/Get Bitsight API Token**\n\n Follow these instructions to get a BitSight API Token.\n 1. For SPM App: Refer to the [User Preference](https://service.bitsight.com/app/spm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 2. 
For TPRM App: Refer to the [User Preference](https://service.bitsight.com/app/tprm/account) tab of your Account page, \n\t\tGo to Settings > Account > User Preferences > API Token.\n 3. For Classic BitSight: Go to your [Account](https://service.bitsight.com/settings) page, \n\t\tGo to Settings > Account > API Token.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of BitSight Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. 
*Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of BitSight Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the BitSight data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the BitSight API Token."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the BitSight connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BitSight-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. \n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h.
**Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. 
Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. Once all application settings have been entered, click **Review + create** to deploy..""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the BitSight data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-BitSight310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitSightXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\n\t a. **FunctionName** - Name of the Azure Function App to be created. Default is BitSight. 
\n\n\t b. **API_token** - Enter API Token of your BitSight account. \n\n\t c. **Azure_Client_Id** - Enter Azure Client Id that you have created during app registration. \n\n\t d. **Azure_Client_Secret** - Enter Azure Client Secret that you have created during creating the client secret. \n\n\t e. **Azure_Tenant_Id** - Enter Azure Tenant Id of your Microsoft Entra ID. \n\n\t f. **Azure_Entra_Object_Id** - Enter Object id of your Microsoft Entra App. \n\n\t g. **Companies** - Please add valid company names separated by asterisk(*). For example: Actors Films*Goliath Investments LLC*HCL Group*Saperix, Inc. \n\n\t h. **Location** - The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t i. **WorkspaceName** - Log analytics workspace name. Can be found under Log analytics \""Settings\"". \n\n\t j. **Portfolio_Companies_Table_Name** - Name of the table to store portfolio companies. Default is BitsightPortfolio_Companies. Please do not keep this field as empty else you will get validation error. \n\n\t k. **Alerts_Table_Name** - Name of the table to store alerts. Default is BitsightAlerts_data. Please do not keep this field as empty else you will get validation error. \n\n\t l. **Breaches_Table_Name** - Name of the table to store breaches. Default is BitsightBreaches_data. Please do not keep this field as empty else you will get validation error. \n\n\t m. **Company_Table_Name** - Name of the table to store company details. Default is BitsightCompany_details. Please do not keep this field as empty else you will get validation error. \n\n\t n. **Company_Rating_Details_Table_Name** - Name of the table to store company rating details. Default is BitsightCompany_rating_details. Please do not keep this field as empty else you will get validation error. \n\n\t o. **Diligence_Historical_Statistics_Table_Name** - Name of the table to store diligence historical statistics. Default is BitsightDiligence_historical_statistics. 
Please do not keep this field as empty else you will get validation error. \n\n\t p. **Diligence_Statistics_Table_Name** - Name of the table to store diligence statistics. Default is BitsightDiligence_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t q. **Findings_Summary_Table_Name** - Name of the table to store findings summary. Default is BitsightFindings_summary. Please do not keep this field as empty else you will get validation error. \n\n\t r. **Findings_Table_Name** - Name of the table to store findings data. Default is BitsightFindings_data. Please do not keep this field as empty else you will get validation error. \n\n\t s. **Graph_Table_Name** - Name of the table to store graph data. Default is BitsightGraph_data. Please do not keep this field as empty else you will get validation error. \n\n\t t. **Industrial_Statistics_Table_Name** - Name of the table to store industrial statistics. Default is BitsightIndustrial_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t u. **Observation_Statistics_Table_Name** - Name of the table to store observation statistics. Default is BitsightObservation_statistics. Please do not keep this field as empty else you will get validation error. \n\n\t v. **LogLevel** - Select log level or log severity value from DEBUG, INFO, ERROR. By default it is set to INFO. \n\n\t w. **Schedule** - Please enter a valid Quartz cron-expression. (Example: 0 0 * * * *). \n\n\t x. **Schedule_Portfolio** - Please enter a valid Quartz cron-expression. (Example: 0 */30 * * * *). \n\n\t y. **AppInsightsWorkspaceResourceID** - Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'. \n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""BitSight API Token is required. See the documentation to [learn more](https://help.bitsighttech.com/hc/en-us/articles/115014888388-API-Token-Management) about API Token.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BitSight/Data%20Connectors/BitSightDataConnector/BitSight_API_FunctionApp.json","true" +"BitglassLogs_CL","Bitglass","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Bitglass","azuresentinel","azure-sentinel-solution-bitglass","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Bitglass","Bitglass","Bitglass","The [Bitglass](https://www.bitglass.com/) data connector provides the capability to retrieve security event logs of the Bitglass services and more events into Microsoft Sentinel through the REST API. 
The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Azure Blob Storage API to pull logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Bitglass**](https://aka.ms/sentinel-bitglass-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Bitglass Log Retrieval API**\n\n Follow the instructions to obtain the credentials.\n\n1. Please contact Bitglass [support](https://pages.bitglass.com/Contact.html) and obtain the **BitglassToken** and **BitglassServiceURL**.\n2.
Save credentials for using in the data connector.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Bitglass data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Bitglass data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-bitglass-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **BitglassToken**, **BitglassServiceURL** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Bitglass data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-bitglass-functionapp) file. 
Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. BitglassXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3.
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tBitglassToken\n\t\tBitglassServiceURL\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**BitglassToken** and **BitglassServiceURL** are required for making API calls.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Bitglass/Data%20Connectors/Bitglass_API_FunctionApp.json","true" +"BitwardenEventLogs_CL","Bitwarden","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Bitwarden","8bit-solutions-llc","bitwarden-sentinel-integration","2024-05-12","2024-10-02","","Bitwarden Inc","Partner","https://bitwarden.com","","domains","BitwardenEventLogs","Bitwarden Inc","Bitwarden Event Logs","This connector provides insight into activity of your Bitwarden organization such as user's activity (logged in, changed password, 2fa, etc.), cipher activity (created, updated, deleted, shared, etc.), collection activity, organization activity, and more.","[{""description"": ""Your API key can be found in the Bitwarden organization admin console.\nPlease see [Bitwarden documentation](https://bitwarden.com/help/public-api/#authentication) for more information.\nSelf-hosted Bitwarden servers may need to reconfigure their installation's URL."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Bitwarden Identity Url"", ""placeholder"": ""https://identity.bitwarden.com"", ""type"": ""text"", ""name"": ""identityEndpoint""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Bitwarden Api Url"", ""placeholder"": ""https://api.bitwarden.com"", ""type"": ""text"", ""name"": ""apiEndpoint""}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}], ""title"": ""Connect Bitwarden Event Logs to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", 
""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Bitwarden Client Id and Client Secret"", ""description"": ""Your API key can be found in the Bitwarden organization admin console. Please see [Bitwarden documentation](https://bitwarden.com/help/public-api/#authentication) for more information.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Bitwarden/Data%20Connectors/BitwardenEventLogs/definitions.json","true" +"BitwardenGroups_CL","Bitwarden","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Bitwarden","8bit-solutions-llc","bitwarden-sentinel-integration","2024-05-12","2024-10-02","","Bitwarden Inc","Partner","https://bitwarden.com","","domains","BitwardenEventLogs","Bitwarden Inc","Bitwarden Event Logs","This connector provides insight into activity of your Bitwarden organization such as user's activity (logged in, changed password, 2fa, etc.), cipher activity (created, updated, deleted, shared, etc.), collection activity, organization activity, and more.","[{""description"": ""Your API key can be found in the Bitwarden organization admin console.\nPlease see [Bitwarden documentation](https://bitwarden.com/help/public-api/#authentication) for more information.\nSelf-hosted Bitwarden servers may need to reconfigure their installation's URL."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": 
{""label"": ""Bitwarden Identity Url"", ""placeholder"": ""https://identity.bitwarden.com"", ""type"": ""text"", ""name"": ""identityEndpoint""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Bitwarden Api Url"", ""placeholder"": ""https://api.bitwarden.com"", ""type"": ""text"", ""name"": ""apiEndpoint""}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}], ""title"": ""Connect Bitwarden Event Logs to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Bitwarden Client Id and Client Secret"", ""description"": ""Your API key can be found in the Bitwarden organization admin console. 
Please see [Bitwarden documentation](https://bitwarden.com/help/public-api/#authentication) for more information.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Bitwarden/Data%20Connectors/BitwardenEventLogs/definitions.json","true" +"BitwardenMembers_CL","Bitwarden","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Bitwarden","8bit-solutions-llc","bitwarden-sentinel-integration","2024-05-12","2024-10-02","","Bitwarden Inc","Partner","https://bitwarden.com","","domains","BitwardenEventLogs","Bitwarden Inc","Bitwarden Event Logs","This connector provides insight into activity of your Bitwarden organization such as user's activity (logged in, changed password, 2fa, etc.), cipher activity (created, updated, deleted, shared, etc.), collection activity, organization activity, and more.","[{""description"": ""Your API key can be found in the Bitwarden organization admin console.\nPlease see [Bitwarden documentation](https://bitwarden.com/help/public-api/#authentication) for more information.\nSelf-hosted Bitwarden servers may need to reconfigure their installation's URL."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Bitwarden Identity Url"", ""placeholder"": ""https://identity.bitwarden.com"", ""type"": ""text"", ""name"": ""identityEndpoint""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Bitwarden Api Url"", ""placeholder"": ""https://api.bitwarden.com"", ""type"": ""text"", ""name"": ""apiEndpoint""}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}], ""title"": ""Connect Bitwarden Event Logs to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", 
""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Bitwarden Client Id and Client Secret"", ""description"": ""Your API key can be found in the Bitwarden organization admin console. Please see [Bitwarden documentation](https://bitwarden.com/help/public-api/#authentication) for more information.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Bitwarden/Data%20Connectors/BitwardenEventLogs/definitions.json","true" +"","Blackberry CylancePROTECT","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Blackberry%20CylancePROTECT","azuresentinel","azure-sentinel-solution-blackberrycylanceprotect","2022-05-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","BlinkOps","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BlinkOps","blinkoperations1709924858838","azure-sentinel-blink_automation","2025-05-05","","","Blink Support","Partner","https://support.blinkops.com","","domains","","","","","","","","false" +"BHEAttackPathsData_CL","BloodHound Enterprise","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BloodHound%20Enterprise","azurehoundenterprise","bloodhoundenterprise-azuresentinel","2023-05-04","2021-05-04","","SpecterOps","Partner","https://bloodhoundenterprise.io/","","domains","BloodHoundEnterprise","SpecterOps","Bloodhound Enterprise","The solution is designed to test Bloodhound Enterprise package creation process.","[{""title"": """", ""description"": 
"">**NOTE:** This connector uses Azure Functions to connect to a 'BloodHound Enterprise' to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieve BloodHound Enterprise API Key and ID**\n\nTo enable the Azure Function to authenticate successfully and pull logs into Microsoft Sentinel, you must first obtain the API Key and ID from your BloodHound Enterprise instance. See the documentation to learn more about API on the `https://bloodhound.specterops.io/integrations/bloodhound-api/working-with-api`.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the 'BloodHound Enterprise' connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the 'BloodHound Enterprise' API authorization key(s) or Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""**Option 1 - Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the 'BloodHound Enterprise' connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)]()\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Tenant URL**, **API Key**, **API ID** 'and/or Other required fields'. 
\n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": """", ""description"": ""**Option 2 - Manual Deployment of Azure Functions**\n\n Use the following step-by-step instructions to deploy the 'BloodHound Enterprise' connector manually with Azure Functions.""}, {""title"": ""1. Create a Function App"", ""description"": ""1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, ensure Runtime stack is set to **python 3.11**. \n4. In the **Hosting** tab, ensure **Plan type** is set to **'Consumption (Serverless)'**.\n5.select Storage account\n6. 'Add other required configurations'. \n5. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""2. Import Function App Code(Zip deployment)"", ""description"": ""1. Install Azure CLI\n2. From terminal type **az functionapp deployment source config-zip -g -n --src ** and hit enter. Set the `ResourceGroup` value to: your resource group name. Set the `FunctionApp` value to: your newly created function app name. Set the `Zip File` value to: `digitalshadowsConnector.zip`(path to your zip file). Note:- Download the zip file from the link - [Function App Code](https://github.com/metron-labs/Azure-Sentinel/blob/bloodhound/Solutions/BloodHound/Data%20Connectors/BloodHoundAzureFunction.zip)""}, {""title"": ""3. Configure the Function App"", ""description"": ""1. 
In the Function App screen, click the Function App name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following 'x (number of)' application settings individually, under Name, with their respective string values (case-sensitive) under Value: \n\t\tDigitalShadowsAccountID\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tDigitalShadowsKey\n\t\tDigitalShadowsSecret\n\t\tHistoricalDays\n\t\tDigitalShadowsURL\n\t\tClassificationFilterOperation\n\t\tHighVariabilityClassifications\n\t\tFUNCTION_NAME\n\t\tlogAnalyticsUri (optional)\n(add any other settings required by the Function App)\nSet the `DigitalShadowsURL` value to: `https://api.searchlight.app/v1`\nSet the `HighVariabilityClassifications` value to: `exposed-credential,marked-document`\nSet the `ClassificationFilterOperation` value to: `exclude` for exclude function app or `include` for include function app \n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Azure Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: https://.ods.opinsights.azure.us. \n4. Once all application settings have been entered, click **Save**.""}, {""instructions"": [{""type"": ""InstructionStepsGroup"", ""parameters"": {""enable"": true, ""instructionSteps"": [{""title"": ""**STEP 3 - Register the Application in Microsoft Entra ID"", ""description"": ""1. 
**Open the [Microsoft Entra ID page](https://entra.microsoft.com/)**:\n - Click the provided link to open the **Microsoft Entra ID** registration page in a new tab.\n - Ensure you are logged in with an account that has **Admin level** permissions.\n\n2. **Create a New Application**:\n - In the **Microsoft Entra ID portal**, select **App registrations** mentioned on the left-hand side tab.\n - Click on **+ New registration**.\n - Fill out the following fields:\n - **Name**: Enter a name for the app (e.g., \u201cBloodHound App\u201d).\n - **Supported account types**: Choose **Accounts in this organizational directory only** (Default Directory only - Single tenant).\n - **Redirect URI**: Leave this blank unless required otherwise.\n - Click **Register** to create the application.\n\n3. **Copy Application and Tenant IDs**:\n - Once the app is registered, note the **Application (client) ID** and **Directory (tenant) ID** from the **Overview** page. You\u2019ll need these for the integration.\n\n4. **Create a Client Secret**:\n - In the **Certificates & secrets** section, click **+ New client secret**.\n - Add a description (e.g., 'BloodHound Secret') and set an expiration (e.g., 1 year).\n - Click **Add**.\n - **Copy the client secret value immediately**, as it will not be shown again.""}, {""title"": ""**STEP 4 - Assign the \""Monitoring Metrics Publisher\"" Role to the App"", ""description"": ""1. **Open the Resource Group in Azure Portal**:\n - Navigate to the **Resource Group** that contains the **Log Analytics Workspace** and **Data Collection Rules (DCRs)** where you want the app to push data.\n\n2. 
**Assign the Role**:\n - In the **Resource Group** menu, click on **Access control (IAM)** mentioned on the left-hand side tab ..\n - Click on **+ Add** and select **Add role assignment**.\n - In the **Role** dropdown, search for and select the **Monitoring Metrics Publisher** role.\n - Under **Assign access to**, choose **Azure AD user, group, or service principal**.\n - In the **Select** field, search for your registered app by **name** or **client ID**.\n - Click **Save** to assign the role to the application.""}, {""title"": ""**STEP 5 - Deploy the ARM Template"", ""description"": ""1. **Retrieve the Workspace ID**:\n - After assigning the role, you will need the **Workspace ID**.\n - Navigate to the **Log Analytics Workspace** within the **Resource Group**.\n - In the **Overview** section, locate the **Workspace ID** field under **Workspace details**.\n - **Copy the Workspace ID** and keep it handy for the next steps.\n\n2. **Click the Deploy to Azure Button**:\n - [![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fmetron-labs%2FAzure-Sentinel%2Fbloodhound%2FSolutions%2FBloodHound%2FData%2520Connectors%2FDeployToAzure.json).\n - This will take you directly to the Azure portal to start the deployment.\n\n3. **Review and Customize Parameters**:\n - On the custom deployment page, ensure you\u2019re deploying to the correct **subscription** and **resource group**.\n - Fill in the parameters like **workspace name**, **workspace ID**, and **workspace location**.\n\n4. **Click Review + Create** and then **Create** to deploy the resources.""}, {""title"": ""**STEP 6 - Verify DCE, DCR, and Log Analytics Table Setup"", ""description"": ""1. 
**Check the Data Collection Endpoint (DCE)**:\n - After deploying, go to **Azure Portal > Data Collection Endpoints**.\n - Verify that the **BloodHoundDCE** endpoint has been created successfully.\n - **Copy the DCE Logs Ingestion URI**, as you\u2019ll need this for generating the webhook URL.\n\n2. **Confirm Data Collection Rule (DCR) Setup**:\n - Go to **Azure Portal > Data Collection Rules**.\n - Ensure the **BloodHoundDCR** rule is present.\n - **Copy the Immutable ID** of the DCR from the Overview page, as you\u2019ll need it for the webhook URL.\n\n3. **Validate Log Analytics Table**:\n - Navigate to your **Log Analytics Workspace** (linked to Microsoft Sentinel).\n - Under the **Tables** section, verify that the **BloodHoundTable_CL** table has been created successfully and is ready to receive data.""}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**BloodHound Enterprise API key & Id** is required. 
See the documentation to learn more about API on the `https://bloodhound.specterops.io/integrations/bloodhound-api/working-with-api`.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/BloodHound%20Enterprise/Data%20Connectors/BloodHoundFunction.json","true" +"BoxEvents_CL","Box","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Box","azuresentinel","azure-sentinel-solution-box","2022-05-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","BoxDataConnector","Box","Box","The Box data connector provides the capability to ingest [Box enterprise's events](https://developer.box.com/guides/events/#admin-events) into Microsoft Sentinel using the Box REST API. Refer to [Box documentation](https://developer.box.com/guides/events/enterprise-events/for-enterprise/) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Box REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This connector depends on a parser based on Kusto Function to work as expected [**BoxEvents**](https://aka.ms/sentinel-BoxDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Box events collection**\n\nSee documentation to [setup JWT authentication](https://developer.box.com/guides/authentication/jwt/jwt-setup/) and [obtain JSON file with credentials](https://developer.box.com/guides/authentication/jwt/with-sdk/#prerequisites).""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Box data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Box JSON configuration file, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Box data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-BoxDataConnector-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **AzureSentinelWorkspaceId**, **AzureSentinelSharedKey**, **BoxConfigJSON**\n4. 
Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Box data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentinel-BoxDataConnector-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAzureSentinelWorkspaceId\n\t\tAzureSentinelSharedKey\n\t\tBOX_CONFIG_JSON\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Box API Credentials"", ""description"": ""Box config JSON file is required for Box REST API JWT authentication. 
[See the documentation to learn more about JWT authentication](https://developer.box.com/guides/authentication/jwt/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Box/Data%20Connectors/Box_API_FunctionApp.json","true" +"BoxEventsV2_CL","Box","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Box","azuresentinel","azure-sentinel-solution-box","2022-05-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","BoxEventsCCPDefinition","Microsoft","Box Events (CCP)","The Box data connector provides the capability to ingest [Box enterprise's events](https://developer.box.com/guides/events/#admin-events) into Microsoft Sentinel using the Box REST API. Refer to [Box documentation](https://developer.box.com/guides/events/enterprise-events/for-enterprise/) for more information.","[{""description"": "">**NOTE:** This connector uses Codeless Connecor Platform (CCP) to connect to the Box REST API to pull logs into Microsoft Sentinel.""}, {""description"": "">**NOTE:** This connector depends on a parser based on Kusto Function to work as expected [**BoxEvents**](https://aka.ms/sentinel-BoxDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""description"": ""**STEP 1 - Create Box Custom Application**\n\nSee documentation to [setup client credentials authentication](https://developer.box.com/guides/authentication/client-credentials/client-credentials-setup/)\n""}, {""description"": ""**STEP 2 - Grab Client ID and Client Secret values**\n\nYou might need to setup 2FA to fetch the secret.\n""}, {""description"": ""**STEP 3 - Grab Box Enterprise ID from Box Admin Console**\n\nSee documentation to [find Enterprise ID](https://developer.box.com/platform/appendix/locating-values/)\n""}, {""description"": ""Provide the required values below:\n"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Box Enterprise ID"", ""placeholder"": ""123456"", ""type"": ""text"", 
""name"": ""boxEnterpriseId""}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}], ""title"": ""Connect to Box to start collecting event logs to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Box API credentials"", ""description"": ""Box API requires a Box App client ID and client secret to authenticate. [See the documentation to learn more about Client Credentials grant](https://developer.box.com/guides/authentication/client-credentials/client-credentials-setup/)""}, {""name"": ""Box Enterprise ID"", ""description"": ""Box Enterprise ID is required to make the connection. See documentation to [find Enterprise ID](https://developer.box.com/platform/appendix/locating-values/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Box/Data%20Connectors/BoxEvents_ccp/BoxEvents_DataConnectorDefinition.json","true" +"BoxEvents_CL","Box","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Box","azuresentinel","azure-sentinel-solution-box","2022-05-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","BoxEventsCCPDefinition","Microsoft","Box Events (CCP)","The Box data connector provides the capability to ingest [Box enterprise's events](https://developer.box.com/guides/events/#admin-events) into Microsoft Sentinel using the Box REST API. 
Refer to [Box documentation](https://developer.box.com/guides/events/enterprise-events/for-enterprise/) for more information.","[{""description"": "">**NOTE:** This connector uses Codeless Connecor Platform (CCP) to connect to the Box REST API to pull logs into Microsoft Sentinel.""}, {""description"": "">**NOTE:** This connector depends on a parser based on Kusto Function to work as expected [**BoxEvents**](https://aka.ms/sentinel-BoxDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""description"": ""**STEP 1 - Create Box Custom Application**\n\nSee documentation to [setup client credentials authentication](https://developer.box.com/guides/authentication/client-credentials/client-credentials-setup/)\n""}, {""description"": ""**STEP 2 - Grab Client ID and Client Secret values**\n\nYou might need to setup 2FA to fetch the secret.\n""}, {""description"": ""**STEP 3 - Grab Box Enterprise ID from Box Admin Console**\n\nSee documentation to [find Enterprise ID](https://developer.box.com/platform/appendix/locating-values/)\n""}, {""description"": ""Provide the required values below:\n"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Box Enterprise ID"", ""placeholder"": ""123456"", ""type"": ""text"", ""name"": ""boxEnterpriseId""}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}], ""title"": ""Connect to Box to start collecting event logs to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Box API credentials"", ""description"": ""Box API requires a Box App 
client ID and client secret to authenticate. [See the documentation to learn more about Client Credentials grant](https://developer.box.com/guides/authentication/client-credentials/client-credentials-setup/)""}, {""name"": ""Box Enterprise ID"", ""description"": ""Box Enterprise ID is required to make the connection. See documentation to [find Enterprise ID](https://developer.box.com/platform/appendix/locating-values/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Box/Data%20Connectors/BoxEvents_ccp/BoxEvents_DataConnectorDefinition.json","true" +"CommonSecurityLog","Broadcom SymantecDLP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Broadcom%20SymantecDLP","azuresentinel","azure-sentinel-solution-broadcomsymantecdlp","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","BroadcomSymantecDLP","Broadcom","[Deprecated] Broadcom Symantec DLP via Legacy Agent","The [Broadcom Symantec Data Loss Prevention (DLP)](https://www.broadcom.com/products/cyber-security/information-protection/data-loss-prevention) connector allows you to easily connect your Symantec DLP with Microsoft Sentinel, to create custom dashboards, alerts, and improve investigation. This gives you more insight into your organization’s information, where it travels, and improves your security operation capabilities.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias SymantecDLP and load the function code or click [here](https://aka.ms/sentinel-symantecdlp-parser). The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python \u2013version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Symantec DLP logs to a Syslog agent"", ""description"": ""Configure Symantec DLP to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent.\n1. [Follow these instructions](https://knowledge.broadcom.com/external/article/159509/generating-syslog-messages-from-data-los.html) to configure the Symantec DLP to forward syslog\n2. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python \u2013version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Broadcom%20SymantecDLP/Data%20Connectors/Connector_Syslog_SymantecDLP.json","true" +"CommonSecurityLog","Broadcom SymantecDLP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Broadcom%20SymantecDLP","azuresentinel","azure-sentinel-solution-broadcomsymantecdlp","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","BroadcomSymantecDLPAma","Broadcom","[Deprecated] Broadcom Symantec DLP via AMA","The [Broadcom Symantec Data Loss Prevention (DLP)](https://www.broadcom.com/products/cyber-security/information-protection/data-loss-prevention) connector allows you to easily connect your Symantec DLP with Microsoft Sentinel, to create custom dashboards, alerts, and improve investigation. This gives you more insight into your organization’s information, where it travels, and improves your security operation capabilities.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias SymantecDLP and load the function code or click [here](https://aka.ms/sentinel-symantecdlp-parser). The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. 
Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward Symantec DLP logs to a Syslog agent"", ""description"": ""Configure Symantec DLP to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent.\n1. [Follow these instructions](https://knowledge.broadcom.com/external/article/159509/generating-syslog-messages-from-data-los.html) to configure the Symantec DLP to forward syslog\n2. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address."", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Broadcom%20SymantecDLP/Data%20Connectors/template_SymantecDLPAMA.json","true" +"","Business Email Compromise - Financial Fraud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Business%20Email%20Compromise%20-%20Financial%20Fraud","azuresentinel","azure-sentinel-solution-bec_financialfraud","2023-08-04","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"Syslog","CTERA","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CTERA","cteranetworksltd1651947437632","ctera-microsoft-sentinel","2024-07-28","","","CTERA","Partner","https://www.ctera.com/","","domains","CTERA","CTERA Networks Ltd","CTERA Syslog","The CTERA Data Connector for Microsoft Sentinel offers monitoring and threat detection capabilities for your CTERA solution.
It includes a workbook visualizing the sum of all operations per type, deletions, and denied access operations.
It also provides analytic rules which detect ransomware incidents and alert you when a user is blocked due to suspicious ransomware activity.
Additionally, it helps you identify critical patterns such as mass access denied events, mass deletions, and mass permission changes, enabling proactive threat management and response.","[{""title"": ""Step 1: Connect CTERA Platform to Syslog"", ""description"": ""Set up your CTERA portal syslog connection and Edge-Filer Syslog connector"", ""instructions"": [{""parameters"": {""title"": ""CTERA Syslog Configuration"", ""instructionSteps"": [{""title"": ""Portal Syslog connection"", ""description"": ""Connect CTERA Portal to syslog server, see instructions https://kb.ctera.com/v1/docs/en/managing-log-settings?highlight=logg""}, {""title"": ""Edge Filer Audit logs"", ""description"": ""Enable Audit logs on the desired Edge-filers""}, {""title"": ""Edge-Filer Syslog Service"", ""description"": ""Enable Edge-Filer Syslog service, see instructions https://kb.ctera.com/v1/docs/en/setting-up-the-edge-filer-syslog-service-2?highlight=Edge%20Filer%20Syslog""}]}}]}, {""title"": ""Step 2: Install Azure Monitor Agent (AMA) on Syslog Server"", ""description"": ""Install the Azure Monitor Agent (AMA) on your syslog server to enable data collection."", ""instructions"": [{""parameters"": {""title"": ""Install Azure Monitor Agent"", ""instructionSteps"": [{""title"": ""Log in to Azure Portal"", ""description"": ""Use your Azure credentials to log in to the Azure Portal.""}, {""title"": ""Navigate to Azure Arc"", ""description"": ""In the Azure Portal, go to 'Azure Arc' and select your connected syslog server.""}, {""title"": ""Select Extensions"", ""description"": ""In the Azure Arc settings for your syslog server, navigate to the 'Extensions' section.""}, {""title"": ""Add Extension"", ""description"": ""Click on 'Add' and select 'Azure Monitor Agent' from the list of available extensions.""}, {""title"": ""Install AMA"", ""description"": ""Follow the prompts to install the Azure Monitor Agent on your syslog server. 
For detailed instructions, refer to the official documentation: [Install Azure Monitor Agent](https://learn.microsoft.com/en-us/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal)""}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CTERA/Data%20Connectors/CTERA_Data_Connector.json","true" +"CBSLog_Azure_1_CL","CTM360","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CTM360","ctm360wll1698919697848","ctm360_microsoft_sentinel_solution","2023-10-23","","","Cyber Threat Management 360","Partner","https://www.ctm360.com/","","domains","CBSPollingIDAzureFunctions","CTM360","Cyber Blind Spot Integration","Through the API integration, you have the capability to retrieve all the issues related to your CBS organizations via a RESTful interface.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a 'CyberBlindSpot' to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the 'CyberBlindSpot' API**\n\nThe provider should provide or link to detailed steps to configure the 'CyberBlindSpot' API endpoint so that the Azure Function can authenticate to it successfully, get its authorization key or token, and pull the appliance's logs into Microsoft Sentinel.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the 'CyberBlindSpot' connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the 'CyberBlindSpot' API authorization key(s) readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""**Option 1 - Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the 'CyberBlindSpot' connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CTM360-CBS-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CTM360-CBS-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API **, 'and/or Other required fields'. \n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the CTM360 CBS data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. Download the [Azure Function App](https://raw.githubusercontent.com/CTM360-Integrations/Azure-Sentinel/ctm360-HV-CBS-azurefunctionapp/Solutions/CTM360/Data%20Connectors/CBS/AzureFunctionCTM360_CBS.zip) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CTIXYZ).\n\n\te. 
**Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tCTM360AccountID\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tCTM360Key\n\t\tFUNCTION_NAME\n\t\tlogAnalyticsUri - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CTM360/Data%20Connectors/CBS/CTM360_CBS_API_functionApp.json","true" +"HackerViewLog_Azure_1_CL","CTM360","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CTM360","ctm360wll1698919697848","ctm360_microsoft_sentinel_solution","2023-10-23","","","Cyber Threat Management 360","Partner","https://www.ctm360.com/","","domains","HVPollingIDAzureFunctions","CTM360","HackerView Intergration","Through the API integration, you have the capability to retrieve all the issues related to your HackerView organizations via a RESTful interface.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a '' to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the 'HackerView' API**\n\nThe provider should provide or link to detailed steps to configure the 'HackerView' API endpoint so that the Azure Function can authenticate to it successfully, get its authorization key or token, and pull the appliance's logs into Microsoft Sentinel.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the 'HackerView' connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the 'HackerView' API authorization key(s) readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""**Option 1 - Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the 'HackerView' connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CTM360-HV-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CTM360-HV-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API **, 'and/or Other required fields'. \n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": """", ""description"": ""**Option 2 - Manual Deployment of Azure Functions**\n\n Use the following step-by-step instructions to deploy the 'HackerView' connector manually with Azure Functions.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the CTM360 CBS data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. Download the [Azure Function App](https://raw.githubusercontent.com/CTM360-Integrations/Azure-Sentinel/ctm360-HV-CBS-azurefunctionapp/Solutions/CTM360/Data%20Connectors/HackerView/AzureFunctionCTM360_HV.zip) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. 
Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CTIXYZ).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tCTM360AccountID\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tCTM360Key\n\t\tFUNCTION_NAME\n\t\tlogAnalyticsUri - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CTM360/Data%20Connectors/HackerView/CTM360_HV_API_FunctionApp.json","true" +"","Check Point","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Check%20Point","checkpoint","checkpoint-sentinel-solutions","2021-08-13","","","Check Point","Partner","https://www.checkpoint.com/support-services/contact-support/","","domains","","","","","","","","false" +"CloudGuard_SecurityEvents_CL","Check Point CloudGuard CNAPP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Check%20Point%20CloudGuard%20CNAPP","checkpoint","checkpoint-sentinel-solutions-cloud-guard","2024-11-12","","","Check Point","Partner","https://www.checkpoint.com/support-services/contact-support/","","domains","CloudGuardCCPDefinition","CheckPoint","Check Point CloudGuard CNAPP Connector for Microsoft Sentinel","The [CloudGuard](https://sc1.checkpoint.com/documents/CloudGuard_Dome9/Documentation/Overview/CloudGuard-CSPM-Introduction.htm?cshid=help_center_documentation) data connector enables the ingestion of security events from the CloudGuard API into Microsoft Sentinel™, using Microsoft Sentinel’s Codeless Connector Platform. The connector supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) which parses incoming security event data into custom columns. 
This pre-parsing process eliminates the need for query-time parsing, resulting in improved performance for data queries.","[{""description"": ""To enable the CloudGuard connector for Microsoft Sentinel, enter the required information below and select Connect.\n>"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""API Key ID"", ""placeholder"": ""api_key"", ""type"": ""text"", ""name"": ""api_key""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key Secret"", ""placeholder"": ""api_secret"", ""type"": ""password"", ""name"": ""api_secret""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CloudGuard Endpoint URL"", ""placeholder"": ""e.g. https://api.dome9.com"", ""type"": ""text"", ""name"": ""endpoint_url""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Filter"", ""placeholder"": ""Paste filter from CloudGuard"", ""type"": ""text"", ""name"": ""query_filter""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Connect CloudGuard Security Events to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""CloudGuard API Key"", ""description"": ""Refer to the instructions provided [here](https://sc1.checkpoint.com/documents/CloudGuard_Dome9/Documentation/Settings/Users-Roles.htm#add_service) to generate an API key.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Check%20Point%20CloudGuard%20CNAPP/Data%20Connectors/CloudGuard_ccp/CloudGuard_DataConnectorDefinition.json","true" +"argsentdc_CL","Check Point Cyberint 
Alerts","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Check%20Point%20Cyberint%20Alerts","checkpoint","checkpoint-cyberint-solutions-alerts","2025-03-18","","","Cyberint","Partner","https://cyberint.com/customer-support/","","domains","CheckPointCyberintAlerts","Checkpoint Cyberint","Check Point Cyberint Alerts Connector (via Codeless Connector Platform)","Cyberint, a Check Point company, provides a Microsoft Sentinel integration to streamline critical Alerts and bring enriched threat intelligence from the Infinity External Risk Management solution into Microsoft Sentinel. This simplifies the process of tracking the status of tickets with automatic sync updates across systems. Using this new integration for Microsoft Sentinel, existing Cyberint and Microsoft Sentinel customers can easily pull logs based on Cyberint's findings into Microsoft Sentinel platform.","[{""title"": ""Connect Checkpoint Cyberint Alerts to Microsoft Sentinel"", ""description"": ""To enable the connector provide the required information below and click on Connect.\n>"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Argos URL"", ""placeholder"": ""Argos URL"", ""type"": ""text"", ""name"": ""argosurl""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""apikey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Customer Name"", ""placeholder"": ""Customer Name"", ""type"": ""text"", ""name"": ""customername""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""Connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Check 
Point Cyberint API Key, Argos URL, and Customer Name"", ""description"": ""The connector API key, Argos URL, and Customer Name are required""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Check%20Point%20Cyberint%20Alerts/Data%20Connectors/CyberintArgosAlertsLogs_ccp/CyberintArgosAlertsLogs_connectorDefinition.json","true" +"iocsent_CL","Check Point Cyberint IOC","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Check%20Point%20Cyberint%20IOC","checkpoint","azure-sentinel-checkpoint-cyberint-ioc","2025-04-29","","","Cyberint","Partner","https://cyberint.com/customer-support/","","domains","CheckPointCyberintIOC","Checkpoint Cyberint","Check Point Cyberint IOC Connector","This is data connector for Check Point Cyberint IOC.","[{""title"": ""Connect Checkpoint Cyberint Alerts to Microsoft Sentinel"", ""description"": ""To enable the connector provide the required information below and click on Connect.\n>"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Argos URL"", ""placeholder"": ""Argos URL"", ""type"": ""text"", ""name"": ""argosurl""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API key"", ""placeholder"": ""API key"", ""type"": ""text"", ""name"": ""apikey""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""Connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Check Point Cyberint API Key and Argos URL"", ""description"": ""The connector API key and Argos URL are 
required""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Check%20Point%20Cyberint%20IOC/Data%20Connectors/CyberintArgosIOCLogs_ccp/CyberintArgosIOCLogs_connectorDefinition.json","true" +"","CheckPhish by Bolster","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CheckPhish%20by%20Bolster","azuresentinel","azure-sentinel-solution-checkphishbybolster","2022-10-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"Syslog","Cisco ACI","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20ACI","azuresentinel","azure-sentinel-solution-ciscoaci","2021-07-03","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoACI","Cisco","[Deprecated] Cisco Application Centric Infrastructure","[Cisco Application Centric Infrastructure (ACI)](https://www.cisco.com/c/en/us/solutions/collateral/data-center-virtualization/application-centric-infrastructure/solution-overview-c22-741487.html) data connector provides the capability to ingest [Cisco ACI logs](https://www.cisco.com/c/en/us/td/docs/switches/datacenter/aci/apic/sw/all/syslog/guide/b_ACI_System_Messages_Guide/m-aci-system-messages-reference.html) into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**CiscoACIEvent**](https://aka.ms/sentinel-CiscoACI-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using Cisco ACI Release 1.x"", ""instructions"": []}, {""title"": ""1. 
Configure Cisco ACI system sending logs via Syslog to remote server where you will install the agent."", ""description"": ""[Follow these steps](https://www.cisco.com/c/en/us/td/docs/switches/datacenter/aci/apic/sw/1-x/basic-config/b_ACI_Config_Guide/b_ACI_Config_Guide_chapter_010.html#d2933e4611a1635) to configure Syslog Destination, Destination Group, and Syslog Source.""}, {""title"": ""2. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server to which the logs will be forwarded.\n\n> Logs on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, 
""type"": ""InstructionStepsGroup""}]}, {""title"": ""3. Check logs in Microsoft Sentinel"", ""description"": ""Open Log Analytics to check if the logs are received using the Syslog schema.\n\n>**NOTE:** It may take up to 15 minutes before new logs will appear in Syslog table."", ""instructions"": []}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20ACI/Data%20Connectors/CiscoACI_Syslog.json","true" +"CiscoETD_CL","Cisco ETD","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20ETD","cisco","cisco-etd-sentinel","2024-03-04","","","Cisco Systems","Partner","","","domains","CiscoETD","Cisco","Cisco ETD","The connector fetches data from ETD api for threat analysis","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ETD API to pull its logs into Microsoft Sentinel.""}, {""title"": """", ""description"": ""**Follow the deployment steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the ETD data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following).\n"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": 
""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco ETD data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CiscoETD-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the **WorkspaceID**, **SharedKey**, **ClientID**, **ClientSecret**, **ApiKey**, **Verdicts**, **ETD Region**\n4. Click **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Email Threat Defense API, API key, Client ID and Secret"", ""description"": ""Ensure you have the API key, Client ID and Secret key.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20ETD/Data%20Connectors/CiscoETD_API_FunctionApp.json","true" +"CommonSecurityLog","Cisco Firepower EStreamer","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Firepower%20EStreamer","cisco","cisco-firepower-estreamer","2022-05-25","","","Cisco","Partner","https://www.cisco.com/c/en_in/support/index.html","","domains","CiscoFirepowerEStreamer","Cisco","[Deprecated] Cisco Firepower eStreamer via Legacy Agent","eStreamer is a Client Server API designed for the Cisco Firepower NGFW Solution. The eStreamer client requests detailed event data on behalf of the SIEM or logging solution in the Common Event Format (CEF).","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 25226 TCP.\n\n> 1. 
Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Install the Firepower eNcore client"", ""description"": ""Install and configure the Firepower eNcore eStreamer client, for more details see full install [guide](https://www.cisco.com/c/en/us/td/docs/security/firepower/670/api/eStreamer_enCore/eStreamereNcoreSentinelOperationsGuide_409.html)"", ""innerSteps"": [{""title"": ""2.1 Download the Firepower Connector from github"", ""description"": ""Download the latest version of the Firepower eNcore connector for Microsoft Sentinel [here](https://github.com/CiscoSecurity/fp-05-microsoft-sentinel-connector). 
If you plan on using python3 use the [python3 eStreamer connector](https://github.com/CiscoSecurity/fp-05-microsoft-sentinel-connector/tree/python3)""}, {""title"": ""2.2 Create a pkcs12 file using the Azure/VM Ip Address"", ""description"": ""Create a pkcs12 certificate using the public IP of the VM instance in Firepower under System->Integration->eStreamer, for more information please see install [guide](https://www.cisco.com/c/en/us/td/docs/security/firepower/670/api/eStreamer_enCore/eStreamereNcoreSentinelOperationsGuide_409.html#_Toc527049443)""}, {""title"": ""2.3 Test Connectivity between the Azure/VM Client and the FMC"", ""description"": ""Copy the pkcs12 file from the FMC to the Azure/VM instance and run the test utility (./encore.sh test) to ensure a connection can be established, for more details please see the setup [guide](https://www.cisco.com/c/en/us/td/docs/security/firepower/670/api/eStreamer_enCore/eStreamereNcoreSentinelOperationsGuide_409.html#_Toc527049430)""}, {""title"": ""2.4 Configure encore to stream data to the agent"", ""description"": ""Configure encore to stream data via TCP to the Microsoft Agent, this should be enabled by default, however, additional ports and streaming protocols can be configured depending on your network security posture, it is also possible to save the data to the file system, for more information please see [Configure Encore](https://www.cisco.com/c/en/us/td/docs/security/firepower/670/api/eStreamer_enCore/eStreamereNcoreSentinelOperationsGuide_409.html#_Toc527049433)""}]}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. 
Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Firepower%20EStreamer/Data%20Connectors/CiscoFirepowerEStreamerCollector.json","true" +"CommonSecurityLog","Cisco Firepower EStreamer","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Firepower%20EStreamer","cisco","cisco-firepower-estreamer","2022-05-25","","","Cisco","Partner","https://www.cisco.com/c/en_in/support/index.html","","domains","CiscoFirepowerEStreamerAma","Cisco","[Deprecated] Cisco Firepower eStreamer via AMA","eStreamer is a Client Server API designed for the Cisco Firepower NGFW Solution. The eStreamer client requests detailed event data on behalf of the SIEM or logging solution in the Common Event Format (CEF).","[{""title"": """", ""description"": """", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. 
Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Install the Firepower eNcore client"", ""description"": ""Install and configure the Firepower eNcore eStreamer client, for more details see full install [guide](https://www.cisco.com/c/en/us/td/docs/security/firepower/670/api/eStreamer_enCore/eStreamereNcoreSentinelOperationsGuide_409.html)"", ""innerSteps"": [{""title"": ""1. Download the Firepower Connector from github"", ""description"": ""Download the latest version of the Firepower eNcore connector for Microsoft Sentinel [here](https://github.com/CiscoSecurity/fp-05-microsoft-sentinel-connector). If you plan on using python3 use the [python3 eStreamer connector](https://github.com/CiscoSecurity/fp-05-microsoft-sentinel-connector/tree/python3)""}, {""title"": ""2. Create a pkcs12 file using the Azure/VM Ip Address"", ""description"": ""Create a pkcs12 certificate using the public IP of the VM instance in Firepower under System->Integration->eStreamer, for more information please see install [guide](https://www.cisco.com/c/en/us/td/docs/security/firepower/670/api/eStreamer_enCore/eStreamereNcoreSentinelOperationsGuide_409.html#_Toc527049443)""}, {""title"": ""3. Test Connectivity between the Azure/VM Client and the FMC"", ""description"": ""Copy the pkcs12 file from the FMC to the Azure/VM instance and run the test utility (./encore.sh test) to ensure a connection can be established, for more details please see the setup [guide](https://www.cisco.com/c/en/us/td/docs/security/firepower/670/api/eStreamer_enCore/eStreamereNcoreSentinelOperationsGuide_409.html#_Toc527049430)""}, {""title"": ""4. 
Configure encore to stream data to the agent"", ""description"": ""Configure encore to stream data via TCP to the Microsoft Agent, this should be enabled by default, however, additional ports and streaming protocols can be configured depending on your network security posture, it is also possible to save the data to the file system, for more information please see [Configure Encore](https://www.cisco.com/c/en/us/td/docs/security/firepower/670/api/eStreamer_enCore/eStreamereNcoreSentinelOperationsGuide_409.html#_Toc527049433)""}]}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Firepower%20EStreamer/Data%20Connectors/template_CiscoFirepowerEStreamerAMA.json","true" +"Syslog","Cisco ISE","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20ISE","azuresentinel","azure-sentinel-solution-ciscoise","2021-07-03","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoISE","Cisco","[Deprecated] Cisco Identity Services Engine","The Cisco Identity Services Engine (ISE) data connector provides the capability to ingest [Cisco ISE](https://www.cisco.com/c/en/us/products/security/identity-services-engine/index.html) events into Microsoft Sentinel. It helps you gain visibility into what is happening in your network, such as who is connected, which applications are installed and running, and much more. Refer to [Cisco ISE logging mechanism documentation](https://www.cisco.com/c/en/us/td/docs/security/ise/2-7/admin_guide/b_ise_27_admin_guide/b_ISE_admin_27_maintain_monitor.html#reference_BAFBA5FA046A45938810A5DF04C00591) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-ciscoise-parser) to create the Kusto Functions alias, **CiscoISEEvent**"", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n2. Select **Apply below configuration to my machines** and select the facilities and severities.\n3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. 
Configure Cisco ISE Remote Syslog Collection Locations"", ""description"": ""[Follow these instructions](https://www.cisco.com/c/en/us/td/docs/security/ise/2-7/admin_guide/b_ise_27_admin_guide/b_ISE_admin_27_maintain_monitor.html#ID58) to configure remote syslog collection locations in your Cisco ISE deployment.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20ISE/Data%20Connectors/Connector_Cisco_ISE.json","true" +"ASimAuditEventLogs","Cisco Meraki Events via REST API","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Meraki%20Events%20via%20REST%20API","azuresentinel","azure-sentinel-solution-ciscomerakinativepoller","2023-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoMerakiMultiRule","Microsoft","Cisco Meraki (using REST API)","The [Cisco Meraki](https://aka.ms/ciscomeraki) connector allows you to easily connect your Cisco Meraki organization events (Security events, Configuration Changes and API Requests) to Microsoft Sentinel. The data connector uses the [Cisco Meraki REST API](https://developer.cisco.com/meraki/api-v1/#!get-organization-appliance-security-events) to fetch logs and supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received data and ingests into ASIM and custom tables in your Log Analytics workspace. This data connector benefits from capabilities such as DCR based ingestion-time filtering, data normalization.

**Supported ASIM schema:**
1. Network Session
2. Web Session
3. Audit Event","[{""description"": ""Currently, this connector allows to ingest events from the following [Cisco Meraki REST API](https://developer.cisco.com/meraki/api-v1/#!get-organization-appliance-security-events) endpoint: \n 1. [Get Organization Appliance Security Events](https://developer.cisco.com/meraki/api-latest/#!get-organization-appliance-security-events) \n>This connector parses **IDS Alert** events into ASimNetworkSessionLogs Table and **File Scanned** events into ASimWebSessionLogs Table. \n 2. [Get Organization Api Requests](https://developer.cisco.com/meraki/api-latest/#!get-organization-api-requests) \n>This connector parses events into ASimWebSessionLogs Table. \n 3. [Get Organization Configuration Changes](https://developer.cisco.com/meraki/api-latest/#!get-organization-configuration-changes) \n>This connector parses events into ASimAuditEventLogs Table."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Organization Id"", ""placeholder"": ""OrganizationId"", ""type"": ""text"", ""name"": ""organization""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""ApiKey"", ""type"": ""password"", ""name"": ""apiKey""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Connect Cisco Meraki events to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Cisco Meraki REST API Key"", ""description"": ""Enable API access in Cisco Meraki and generate API Key. Please refer to Cisco Meraki official [documentation](https://aka.ms/ciscomerakiapikey) for more information.""}, {""name"": ""Cisco Meraki Organization Id"", ""description"": ""Obtain your Cisco Meraki organization id to fetch security events. Follow the steps in the [documentation](https://aka.ms/ciscomerakifindorg) to obtain the Organization Id using the Meraki API Key obtained in previous step.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Meraki%20Events%20via%20REST%20API/Data%20Connectors/CiscoMerakiMultiRule_ccp/dataConnectorDefinition.json","true" +"ASimNetworkSessionLogs","Cisco Meraki Events via REST API","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Meraki%20Events%20via%20REST%20API","azuresentinel","azure-sentinel-solution-ciscomerakinativepoller","2023-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoMerakiMultiRule","Microsoft","Cisco Meraki (using REST API)","The [Cisco Meraki](https://aka.ms/ciscomeraki) connector allows you to easily connect your Cisco Meraki organization events (Security events, Configuration Changes and API Requests) to Microsoft Sentinel. The data connector uses the [Cisco Meraki REST API](https://developer.cisco.com/meraki/api-v1/#!get-organization-appliance-security-events) to fetch logs and supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received data and ingests into ASIM and custom tables in your Log Analytics workspace. 
This data connector benefits from capabilities such as DCR based ingestion-time filtering, data normalization.

**Supported ASIM schema:**
1. Network Session
2. Web Session
3. Audit Event","[{""description"": ""Currently, this connector allows to ingest events from the following [Cisco Meraki REST API](https://developer.cisco.com/meraki/api-v1/#!get-organization-appliance-security-events) endpoint: \n 1. [Get Organization Appliance Security Events](https://developer.cisco.com/meraki/api-latest/#!get-organization-appliance-security-events) \n>This connector parses **IDS Alert** events into ASimNetworkSessionLogs Table and **File Scanned** events into ASimWebSessionLogs Table. \n 2. [Get Organization Api Requests](https://developer.cisco.com/meraki/api-latest/#!get-organization-api-requests) \n>This connector parses events into ASimWebSessionLogs Table. \n 3. [Get Organization Configuration Changes](https://developer.cisco.com/meraki/api-latest/#!get-organization-configuration-changes) \n>This connector parses events into ASimAuditEventLogs Table."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Organization Id"", ""placeholder"": ""OrganizationId"", ""type"": ""text"", ""name"": ""organization""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""ApiKey"", ""type"": ""password"", ""name"": ""apiKey""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Connect Cisco Meraki events to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Cisco Meraki REST API Key"", ""description"": ""Enable API access in Cisco Meraki and generate API Key. Please refer to Cisco Meraki official [documentation](https://aka.ms/ciscomerakiapikey) for more information.""}, {""name"": ""Cisco Meraki Organization Id"", ""description"": ""Obtain your Cisco Meraki organization id to fetch security events. Follow the steps in the [documentation](https://aka.ms/ciscomerakifindorg) to obtain the Organization Id using the Meraki API Key obtained in previous step.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Meraki%20Events%20via%20REST%20API/Data%20Connectors/CiscoMerakiMultiRule_ccp/dataConnectorDefinition.json","true" +"ASimWebSessionLogs","Cisco Meraki Events via REST API","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Meraki%20Events%20via%20REST%20API","azuresentinel","azure-sentinel-solution-ciscomerakinativepoller","2023-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoMerakiMultiRule","Microsoft","Cisco Meraki (using REST API)","The [Cisco Meraki](https://aka.ms/ciscomeraki) connector allows you to easily connect your Cisco Meraki organization events (Security events, Configuration Changes and API Requests) to Microsoft Sentinel. The data connector uses the [Cisco Meraki REST API](https://developer.cisco.com/meraki/api-v1/#!get-organization-appliance-security-events) to fetch logs and supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received data and ingests into ASIM and custom tables in your Log Analytics workspace. 
This data connector benefits from capabilities such as DCR based ingestion-time filtering, data normalization.

**Supported ASIM schema:**
1. Network Session
2. Web Session
3. Audit Event","[{""description"": ""Currently, this connector allows to ingest events from the following [Cisco Meraki REST API](https://developer.cisco.com/meraki/api-v1/#!get-organization-appliance-security-events) endpoint: \n 1. [Get Organization Appliance Security Events](https://developer.cisco.com/meraki/api-latest/#!get-organization-appliance-security-events) \n>This connector parses **IDS Alert** events into ASimNetworkSessionLogs Table and **File Scanned** events into ASimWebSessionLogs Table. \n 2. [Get Organization Api Requests](https://developer.cisco.com/meraki/api-latest/#!get-organization-api-requests) \n>This connector parses events into ASimWebSessionLogs Table. \n 3. [Get Organization Configuration Changes](https://developer.cisco.com/meraki/api-latest/#!get-organization-configuration-changes) \n>This connector parses events into ASimAuditEventLogs Table."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Organization Id"", ""placeholder"": ""OrganizationId"", ""type"": ""text"", ""name"": ""organization""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""ApiKey"", ""type"": ""password"", ""name"": ""apiKey""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Connect Cisco Meraki events to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Cisco Meraki REST API Key"", ""description"": ""Enable API access in Cisco Meraki and generate API Key. Please refer to Cisco Meraki official [documentation](https://aka.ms/ciscomerakiapikey) for more information.""}, {""name"": ""Cisco Meraki Organization Id"", ""description"": ""Obtain your Cisco Meraki organization id to fetch security events. Follow the steps in the [documentation](https://aka.ms/ciscomerakifindorg) to obtain the Organization Id using the Meraki API Key obtained in previous step.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Meraki%20Events%20via%20REST%20API/Data%20Connectors/CiscoMerakiMultiRule_ccp/dataConnectorDefinition.json","true" +"CiscoSDWANNetflow_CL","Cisco SD-WAN","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20SD-WAN","cisco","cisco-catalyst-sdwan-sentinel","2023-06-01","2024-06-01","","Cisco Systems","Partner","https://globalcontacts.cloudapps.cisco.com/contacts/contactDetails/en_US/c1o1-c2o2-c3o8","","domains","CiscoSDWAN","Cisco","Cisco Software Defined WAN","The Cisco Software Defined WAN (SD-WAN) data connector provides the capability to ingest [Cisco SD-WAN](https://www.cisco.com/c/en_in/solutions/enterprise-networks/sd-wan/index.html) Syslog and Netflow data into Microsoft Sentinel.","[{""description"": ""**To ingest Cisco SD-WAN Syslog and Netflow data into Microsoft Sentinel follow the steps below.**""}, {""title"": ""1. Steps to ingest Syslog data to Microsoft Sentinel"", ""description"": ""Azure Monitor Agent will be used to collect the syslog data into Microsoft Sentinel. 
For that first need to create an azure arc server for the VM from which syslog data will be sent.\n""}, {""title"": ""1.1 Steps to Add Azure Arc Server"", ""description"": ""1. In Azure portal, go to Servers - Azure Arc and click on Add.\n2. Select Generate Script under Add a single server section. A User can also generate scripts for Multiple Servers as well.\n3. Review the information on the Prerequisites page, then select Next.\n4. On the Resource details page, provide the subscription and resource group of the Microsoft Sentinel, Region, Operating system and Connectivity method. Then select Next.\n5. On the Tags page, review the default Physical location tags suggested and enter a value, or specify one or more Custom tags to support your standards. Then select Next\n6. Select Download to save the script file. \n7. Now that you have generated the script, the next step is to run it on the server that you want to onboard to Azure Arc. \n8. If you have Azure VM follow the steps mentioned in the [link](https://learn.microsoft.com/azure/azure-arc/servers/plan-evaluate-on-azure-virtual-machine) before running the script. \n9. Run the script by the following command: `./.sh`\n10. After you install the agent and configure it to connect to Azure Arc-enabled servers, go to the Azure portal to verify that the server has successfully connected. View your machine in the Azure portal.\n> **Reference link:** [https://learn.microsoft.com/azure/azure-arc/servers/learn/quick-enable-hybrid-vm](https://learn.microsoft.com/azure/azure-arc/servers/learn/quick-enable-hybrid-vm)""}, {""title"": ""1.2 Steps to Create Data Collection Rule (DCR)"", ""description"": ""1. In Azure Portal search for Monitor. Under Settings, select Data Collection Rules and Select Create.\n2. On the Basics panel, enter the Rule Name, Subscription, Resource group, Region and Platform Type.\n3. Select Next: Resources.\n4. 
Select Add resources.Use the filters to find the virtual machine that you'll use to collect logs.\n5. Select the virtual machine. Select Apply.\n6. Select Next: Collect and deliver.\n7. Select Add data source. For Data source type, select Linux syslog. \n8. For Minimum log level, leave the default values LOG_DEBUG.\n9. Select Next: Destination.\n10. Select Add destination and add Destination type, Subscription and Account or namespace.\n11. Select Add data source. Select Next: Review + create.\n12. Select Create. Wait for 20 minutes. In Microsoft Sentinel or Azure Monitor, verify that the Azure Monitor agent is running on your VM.\n> **Reference link:** [https://learn.microsoft.com/azure/sentinel/forward-syslog-monitor-agent](https://learn.microsoft.com/azure/sentinel/forward-syslog-monitor-agent)""}, {""title"": ""2. Steps to ingest Netflow data to Microsoft sentinel"", ""description"": ""To Ingest Netflow data into Microsoft sentinel, Filebeat and Logstash needs to be installed and configured on the VM. After the configuration, vm will be able to receive netflow data on the configured port and that data will be ingested into the workspace of Microsoft sentinel.\n""}, {""title"": ""2.1 Install filebeat and logstash"", ""description"": ""1. For the installation of filebeat and logstash using apt refer to this doc: \n 1. Filebeat: [https://www.elastic.co/guide/en/beats/filebeat/current/setup-repositories.html](https://www.elastic.co/guide/en/beats/filebeat/current/setup-repositories.html). \n 2. Logstash: [https://www.elastic.co/guide/en/logstash/current/installing-logstash.html](https://www.elastic.co/guide/en/logstash/current/installing-logstash.html). \n2. For the installation of filebeat and logstash for RedHat based Linux (yum) steps are as follows: \n 1. Filebeat: [https://www.elastic.co/guide/en/beats/filebeat/current/setup-repositories.html#_yum](https://www.elastic.co/guide/en/beats/filebeat/current/setup-repositories.html#_yum). \n 2. 
Logstash: [https://www.elastic.co/guide/en/logstash/current/installing-logstash.html#_yum](https://www.elastic.co/guide/en/logstash/current/installing-logstash.html#_yum)""}, {""title"": ""2.2 Configure Filebeat to send events to Logstash"", ""description"": ""1. Edit filebeat.yml file: `vi /etc/filebeat/filebeat.yml` \n2. Comment out the Elasticsearch Output section. \n3. Uncomment Logstash Output section (Uncomment out only these two lines)-\n\t\toutput.logstash\n\t\thosts: [\""localhost:5044\""] \n3. In the Logstash Output section, if you want to send the data other than the default port i.e. 5044 port, then replace the port number in the hosts field. (Note: This port should be added in the conf file, while configuring logstash.) \n4. In the 'filebeat.inputs' section comment out existing configuration and add the following configuration: \n\t\t- type: netflow\n\t\t max_message_size: 10KiB\n\t\t host: \""0.0.0.0:2055\""\n\t\t protocols: [ v5, v9, ipfix ]\n\t\t expiration_timeout: 30m\n\t\t queue_size: 8192\n\t\t custom_definitions:\n\t\t - /etc/filebeat/custom.yml\n\t\t detect_sequence_reset: true\n\t\t enabled: true \n6. In the Filebeat inputs section, if you want to receive the data other than the default port i.e. 2055 port, then replace the port number in the host field. \n7. Add the provided [custom.yml](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/Cisco%20SD-WAN/Data%20Connectors/custom.yml) file inside the /etc/filebeat/ directory. \n8. Open the filebeat input and output port in the firewall. \n 1. Run command: `firewall-cmd --zone=public --permanent --add-port=2055/udp` \n 2. Run command: `firewall-cmd --zone=public --permanent --add-port=5044/udp` \n> Note: if a custom port is added for filebeat input/output, then open that port in the firewall.""}, {""title"": ""2.3 Configure Logstash to send events to Microsoft Sentinel"", ""description"": ""1. Install the Azure Log Analytics plugin: \n 1. 
Run Command: `sudo /usr/share/logstash/bin/logstash-plugin install microsoft-logstash-output-azure-loganalytics` \n3. Store the Log Analytics workspace key in the Logstash key store. The workspace key can be found in Azure Portal under Log analytic workspace > Select workspace > Under Settings select Agent > Log Analytics agent instructions. \n4. Copy the Primary key and run the following commands: \n 1. `sudo /usr/share/logstash/bin/logstash-keystore --path.settings /etc/logstash create LogAnalyticsKey` \n 2. `sudo /usr/share/logstash/bin/logstash-keystore --path.settings /etc/logstash add LogAnalyticsKey` \n5. Create the configuration file /etc/logstash/cisco-netflow-to-sentinel.conf: \n\t\tinput {\n\t\t beats {\n\t\t port => #(Enter output port number which has been configured during filebeat configuration i.e. filebeat.yml file .)\n\t\t }\n\t\t}\n\t\toutput {\n\t\t microsoft-logstash-output-azure-loganalytics {\n\t\t workspace_id => \""\""\n\t\t workspace_key => \""${LogAnalyticsKey}\""\n\t\t custom_log_table_name => \""CiscoSDWANNetflow\""\n\t\t }\n\t\t} \n> Note: If table is not present in Microsoft sentinel, then it will create a new table in sentinel.""}, {""title"": ""2.4 Run Filebeat:"", ""description"": ""1. Open a terminal and run the command: \n> `systemctl start filebeat` \n2. This command will start running filebeat in the background. To see the logs stop the filebeat (`systemctl stop filebeat`) then run the following command: \n> `filebeat run -e`""}, {""title"": ""2.5 Run Logstash:"", ""description"": ""1. In another terminal run the command: \n> `/usr/share/logstash/bin/logstash --path.settings /etc/logstash -f /etc/logstash/cisco-netflow-to-sentinel.conf &` \n2. This command will start running the logstash in the background. 
To see the logs of logstash kill the above process and run the following command : \n> `/usr/share/logstash/bin/logstash --path.settings /etc/logstash -f /etc/logstash/cisco-netflow-to-sentinel.conf`""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20SD-WAN/Data%20Connectors/CiscoSDWAN.json","true" +"Syslog","Cisco SD-WAN","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20SD-WAN","cisco","cisco-catalyst-sdwan-sentinel","2023-06-01","2024-06-01","","Cisco Systems","Partner","https://globalcontacts.cloudapps.cisco.com/contacts/contactDetails/en_US/c1o1-c2o2-c3o8","","domains","CiscoSDWAN","Cisco","Cisco Software Defined WAN","The Cisco Software Defined WAN(SD-WAN) data connector provides the capability to ingest [Cisco SD-WAN](https://www.cisco.com/c/en_in/solutions/enterprise-networks/sd-wan/index.html) Syslog and Netflow data into Microsoft Sentinel.","[{""description"": ""**To ingest Cisco SD-WAN Syslog and Netflow data into Microsoft Sentinel follow the steps below.**""}, {""title"": ""1. Steps to ingest Syslog data to Microsoft sentinel"", ""description"": ""Azure Monitor Agent will be used to collect the syslog data into Microsoft sentinel. For that first need to create an azure arc server for the VM from which syslog data will be sent.\n""}, {""title"": ""1.1 Steps to Add Azure Arc Server"", ""description"": ""1. In Azure portal, go to Servers - Azure Arc and click on Add.\n2. Select Generate Script under Add a single server section. A User can also generate scripts for Multiple Servers as well.\n3. Review the information on the Prerequisites page, then select Next.\n4. 
On the Resource details page, provide the subscription and resource group of the Microsoft Sentinel, Region, Operating system and Connectivity method. Then select Next.\n5. On the Tags page, review the default Physical location tags suggested and enter a value, or specify one or more Custom tags to support your standards. Then select Next\n6. Select Download to save the script file. \n7. Now that you have generated the script, the next step is to run it on the server that you want to onboard to Azure Arc. \n8. If you have Azure VM follow the steps mentioned in the [link](https://learn.microsoft.com/azure/azure-arc/servers/plan-evaluate-on-azure-virtual-machine) before running the script. \n9. Run the script by the following command: `./.sh`\n10. After you install the agent and configure it to connect to Azure Arc-enabled servers, go to the Azure portal to verify that the server has successfully connected. View your machine in the Azure portal.\n> **Reference link:** [https://learn.microsoft.com/azure/azure-arc/servers/learn/quick-enable-hybrid-vm](https://learn.microsoft.com/azure/azure-arc/servers/learn/quick-enable-hybrid-vm)""}, {""title"": ""1.2 Steps to Create Data Collection Rule (DCR)"", ""description"": ""1. In Azure Portal search for Monitor. Under Settings, select Data Collection Rules and Select Create.\n2. On the Basics panel, enter the Rule Name, Subscription, Resource group, Region and Platform Type.\n3. Select Next: Resources.\n4. Select Add resources.Use the filters to find the virtual machine that you'll use to collect logs.\n5. Select the virtual machine. Select Apply.\n6. Select Next: Collect and deliver.\n7. Select Add data source. For Data source type, select Linux syslog. \n8. For Minimum log level, leave the default values LOG_DEBUG.\n9. Select Next: Destination.\n10. Select Add destination and add Destination type, Subscription and Account or namespace.\n11. Select Add data source. Select Next: Review + create.\n12. Select Create. 
Wait for 20 minutes. In Microsoft Sentinel or Azure Monitor, verify that the Azure Monitor agent is running on your VM.\n> **Reference link:** [https://learn.microsoft.com/azure/sentinel/forward-syslog-monitor-agent](https://learn.microsoft.com/azure/sentinel/forward-syslog-monitor-agent)""}, {""title"": ""2. Steps to ingest Netflow data to Microsoft sentinel"", ""description"": ""To Ingest Netflow data into Microsoft sentinel, Filebeat and Logstash needs to be installed and configured on the VM. After the configuration, vm will be able to receive netflow data on the configured port and that data will be ingested into the workspace of Microsoft sentinel.\n""}, {""title"": ""2.1 Install filebeat and logstash"", ""description"": ""1. For the installation of filebeat and logstash using apt refer to this doc: \n 1. Filebeat: [https://www.elastic.co/guide/en/beats/filebeat/current/setup-repositories.html](https://www.elastic.co/guide/en/beats/filebeat/current/setup-repositories.html). \n 2. Logstash: [https://www.elastic.co/guide/en/logstash/current/installing-logstash.html](https://www.elastic.co/guide/en/logstash/current/installing-logstash.html). \n2. For the installation of filebeat and logstash for RedHat based Linux (yum) steps are as follows: \n 1. Filebeat: [https://www.elastic.co/guide/en/beats/filebeat/current/setup-repositories.html#_yum](https://www.elastic.co/guide/en/beats/filebeat/current/setup-repositories.html#_yum). \n 2. Logstash: [https://www.elastic.co/guide/en/logstash/current/installing-logstash.html#_yum](https://www.elastic.co/guide/en/logstash/current/installing-logstash.html#_yum)""}, {""title"": ""2.2 Configure Filebeat to send events to Logstash"", ""description"": ""1. Edit filebeat.yml file: `vi /etc/filebeat/filebeat.yml` \n2. Comment out the Elasticsearch Output section. \n3. Uncomment Logstash Output section (Uncomment out only these two lines)-\n\t\toutput.logstash\n\t\thosts: [\""localhost:5044\""] \n3. 
In the Logstash Output section, if you want to send the data other than the default port i.e. 5044 port, then replace the port number in the hosts field. (Note: This port should be added in the conf file, while configuring logstash.) \n4. In the 'filebeat.inputs' section comment out existing configuration and add the following configuration: \n\t\t- type: netflow\n\t\t max_message_size: 10KiB\n\t\t host: \""0.0.0.0:2055\""\n\t\t protocols: [ v5, v9, ipfix ]\n\t\t expiration_timeout: 30m\n\t\t queue_size: 8192\n\t\t custom_definitions:\n\t\t - /etc/filebeat/custom.yml\n\t\t detect_sequence_reset: true\n\t\t enabled: true \n6. In the Filebeat inputs section, if you want to receive the data other than the default port i.e. 2055 port, then replace the port number in the host field. \n7. Add the provided [custom.yml](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/Cisco%20SD-WAN/Data%20Connectors/custom.yml) file inside the /etc/filebeat/ directory. \n8. Open the filebeat input and output port in the firewall. \n 1. Run command: `firewall-cmd --zone=public --permanent --add-port=2055/udp` \n 2. Run command: `firewall-cmd --zone=public --permanent --add-port=5044/udp` \n> Note: if a custom port is added for filebeat input/output, then open that port in the firewall.""}, {""title"": ""2.3 Configure Logstash to send events to Microsoft Sentinel"", ""description"": ""1. Install the Azure Log Analytics plugin: \n 1. Run Command: `sudo /usr/share/logstash/bin/logstash-plugin install microsoft-logstash-output-azure-loganalytics` \n3. Store the Log Analytics workspace key in the Logstash key store. The workspace key can be found in Azure Portal under Log analytic workspace > Select workspace > Under Settings select Agent > Log Analytics agent instructions. \n4. Copy the Primary key and run the following commands: \n 1. `sudo /usr/share/logstash/bin/logstash-keystore --path.settings /etc/logstash create LogAnalyticsKey` \n 2. 
`sudo /usr/share/logstash/bin/logstash-keystore --path.settings /etc/logstash add LogAnalyticsKey` \n5. Create the configuration file /etc/logstash/cisco-netflow-to-sentinel.conf: \n\t\tinput {\n\t\t beats {\n\t\t port => #(Enter output port number which has been configured during filebeat configuration i.e. filebeat.yml file .)\n\t\t }\n\t\t}\n\t\toutput {\n\t\t microsoft-logstash-output-azure-loganalytics {\n\t\t workspace_id => \""\""\n\t\t workspace_key => \""${LogAnalyticsKey}\""\n\t\t custom_log_table_name => \""CiscoSDWANNetflow\""\n\t\t }\n\t\t} \n> Note: If table is not present in Microsoft sentinel, then it will create a new table in sentinel.""}, {""title"": ""2.4 Run Filebeat:"", ""description"": ""1. Open a terminal and run the command: \n> `systemctl start filebeat` \n2. This command will start running filebeat in the background. To see the logs stop the filebeat (`systemctl stop filebeat`) then run the following command: \n> `filebeat run -e`""}, {""title"": ""2.5 Run Logstash:"", ""description"": ""1. In another terminal run the command: \n> `/usr/share/logstash/bin/logstash --path.settings /etc/logstash -f /etc/logstash/cisco-netflow-to-sentinel.conf &` \n2. This command will start running the logstash in the background. 
To see the logs of logstash kill the above process and run the following command : \n> `/usr/share/logstash/bin/logstash --path.settings /etc/logstash -f /etc/logstash/cisco-netflow-to-sentinel.conf`""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20SD-WAN/Data%20Connectors/CiscoSDWAN.json","true" +"Syslog","Cisco Secure Cloud Analytics","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Cloud%20Analytics","azuresentinel","azure-sentinel-solution-ciscostealthwatch","2021-10-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Stealthwatch","Cisco","[Deprecated] Cisco Secure Cloud Analytics","The [Cisco Secure Cloud Analytics](https://www.cisco.com/c/en/us/products/security/stealthwatch/index.html) data connector provides the capability to ingest [Cisco Secure Cloud Analytics events](https://www.cisco.com/c/dam/en/us/td/docs/security/stealthwatch/management_console/securit_events_alarm_categories/7_4_2_Security_Events_and_Alarm_Categories_DV_2_1.pdf) into Microsoft Sentinel. 
Refer to [Cisco Secure Cloud Analytics documentation](https://www.cisco.com/c/dam/en/us/td/docs/security/stealthwatch/system_installation_configuration/7_5_0_System_Configuration_Guide_DV_1_3.pdf) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**StealthwatchEvent**](https://aka.ms/sentinel-stealthwatch-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using Cisco Secure Cloud Analytics version 7.3.2"", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server where the Cisco Secure Cloud Analytics logs are forwarded.\n\n> Logs from Cisco Secure Cloud Analytics Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": 
[{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure Cisco Secure Cloud Analytics event forwarding"", ""description"": ""Follow the configuration steps below to get Cisco Secure Cloud Analytics logs into Microsoft Sentinel.\n1. Log in to the Stealthwatch Management Console (SMC) as an administrator.\n2. In the menu bar, click **Configuration** **>** **Response Management**.\n3. From the **Actions** section in the **Response Management** menu, click **Add > Syslog Message**.\n4. In the Add Syslog Message Action window, configure parameters.\n5. Enter the following custom format:\n|Lancope|Stealthwatch|7.3|{alarm_type_id}|0x7C|src={source_ip}|dst={target_ip}|dstPort={port}|proto={protocol}|msg={alarm_type_description}|fullmessage={details}|start={start_active_time}|end={end_active_time}|cat={alarm_category_name}|alarmID={alarm_id}|sourceHG={source_host_group_names}|targetHG={target_host_group_names}|sourceHostSnapshot={source_url}|targetHostSnapshot={target_url}|flowCollectorName={device_name}|flowCollectorIP={device_ip}|domain={domain_name}|exporterName={exporter_hostname}|exporterIPAddress={exporter_ip}|exporterInfo={exporter_label}|targetUser={target_username}|targetHostname={target_hostname}|sourceUser={source_username}|alarmStatus={alarm_status}|alarmSev={alarm_severity_name}\n\n6. Select the custom format from the list and click **OK**\n7. Click **Response Management > Rules**.\n8. Click **Add** and select **Host Alarm**.\n9. Provide a rule name in the **Name** field.\n10. Create rules by selecting values from the Type and Options menus. To add more rules, click the ellipsis icon. 
For a Host Alarm, combine as many possible types in a statement as possible.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Cloud%20Analytics/Data%20Connectors/Cisco_Stealthwatch_syslog.json","true" +"CiscoSecureEndpoint_CL","Cisco Secure Endpoint","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Endpoint","azuresentinel","azure-sentinel-solution-ciscosecureendpoint","2021-10-28","2022-02-02","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoSecureEndpoint","Cisco","[DEPRECATED] Cisco Secure Endpoint (AMP)","The Cisco Secure Endpoint (formerly AMP for Endpoints) data connector provides the capability to ingest Cisco Secure Endpoint [audit logs](https://api-docs.amp.cisco.com/api_resources/AuditLog?api_host=api.amp.cisco.com&api_version=v1) and [events](https://api-docs.amp.cisco.com/api_actions/details?api_action=GET+%2Fv1%2Fevents&api_host=api.amp.cisco.com&api_resource=Event&api_version=v1) into Microsoft Sentinel.

NOTE: This data connector has been deprecated; consider moving to the CCF data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Cisco Secure Endpoint API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**CiscoSecureEndpoint**](https://aka.ms/sentinel-ciscosecureendpoint-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Obtaining Cisco Secure Endpoint API credentials**\n\n1. 
Follow the instructions in the [documentation](https://api-docs.amp.cisco.com/api_resources?api_host=api.amp.cisco.com&api_version=v1) to generate Client ID and API Key.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Azure Blob Storage connection string and container name, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ciscosecureendpoint-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Cisco Secure Endpoint Api Host**, **Cisco Secure Endpoint Client Id**, **Cisco Secure Endpoint Api Key**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key**\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. 
Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-ciscosecureendpoint-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions.\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tCISCO_SE_API_API_HOST\n\t\tCISCO_SE_API_CLIENT_ID\n\t\tCISCO_SE_API_KEY\n\t\tWORKSPACE_ID\n\t\tSHARED_KEY\n\t\tlogAnalyticsUri (Optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. \n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Cisco Secure Endpoint API credentials"", ""description"": ""Cisco Secure Endpoint Client ID and API Key are required. [See the documentation to learn more about Cisco Secure Endpoint API](https://api-docs.amp.cisco.com/api_resources?api_host=api.amp.cisco.com&api_version=v1). 
[API domain](https://api-docs.amp.cisco.com) must be provided as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Endpoint/Data%20Connectors/CiscoSecureEndpoint_API_FunctionApp.json","true" +"CiscoSecureEndpointAuditLogsV2_CL","Cisco Secure Endpoint","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Endpoint","azuresentinel","azure-sentinel-solution-ciscosecureendpoint","2021-10-28","2022-02-02","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoSecureEndpointLogsCCPDefinition","Microsoft","Cisco Secure Endpoint (via Codeless Connector Framework)","The Cisco Secure Endpoint (formerly AMP for Endpoints) data connector provides the capability to ingest Cisco Secure Endpoint [audit logs](https://developer.cisco.com/docs/secure-endpoint/auditlog/) and [events](https://developer.cisco.com/docs/secure-endpoint/v1-api-reference-event/) into Microsoft Sentinel.","[{""description"": ""To ingest data from Cisco Secure Endpoint to Microsoft Sentinel, you have to click on Add Account button below, then you get a pop up to fill the details like Email, Organization, Client ID, API Key and Region, provide the required information and click on Connect. 
You can see the connected organizations/emails in the below grid.\n>"", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Organization"", ""columnValue"": ""properties.addOnAttributes.Organization""}, {""columnName"": ""Email"", ""columnValue"": ""properties.addOnAttributes.Email""}, {""columnName"": ""Endpoint"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Cisco Secure Endpoint Email"", ""placeholder"": ""Enter your Cisco Email"", ""type"": ""text"", ""name"": ""email"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Cisco Secure Endpoint Organization"", ""placeholder"": ""Enter the name of your Organization"", ""type"": ""text"", ""name"": ""organization"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Cisco Secure Endpoint Client ID"", ""placeholder"": ""Enter your Client ID"", ""type"": ""text"", ""name"": ""username"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Cisco Secure Endpoint API Key"", ""placeholder"": ""Enter your API Key"", ""type"": ""password"", ""name"": ""apiKey"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Cisco Secure Endpoint Region"", ""placeholder"": ""Enter the region you want to connect"", ""type"": ""text"", ""name"": ""region"", ""required"": true, ""description"": ""For example, if your region is https://api.apjc.amp.cisco.com then enter only apjc.amp in the above field. 
Follow the link provided in the Cisco Secure Endpoint API Credentials/Regions section for better understanding of the regions.""}}]}]}}], ""title"": ""Connect Cisco Secure Endpoint to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Cisco Secure Endpoint API Credentials/Regions"", ""description"": ""To create API Credentials and to understand the regions, follow the document link provided here. [Click here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Endpoint/Data%20Connectors/README.md).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Endpoint/Data%20Connectors/CiscoSecureEndpointLogs_ccp/CiscoSecureEndpointLogs_ConnectorDefinition.json","true" +"CiscoSecureEndpointEventsV2_CL","Cisco Secure Endpoint","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Endpoint","azuresentinel","azure-sentinel-solution-ciscosecureendpoint","2021-10-28","2022-02-02","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoSecureEndpointLogsCCPDefinition","Microsoft","Cisco Secure Endpoint (via Codeless Connector Framework)","The Cisco Secure Endpoint (formerly AMP for Endpoints) data connector provides the capability to ingest Cisco Secure Endpoint [audit logs](https://developer.cisco.com/docs/secure-endpoint/auditlog/) and [events](https://developer.cisco.com/docs/secure-endpoint/v1-api-reference-event/) into Microsoft Sentinel.","[{""description"": ""To ingest data from Cisco Secure Endpoint to Microsoft Sentinel, you have to click on Add Account button below, then you get a pop up to fill the details like Email, Organization, Client ID, API Key 
and Region, provide the required information and click on Connect. You can see the connected organizations/emails in the below grid.\n>"", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Organization"", ""columnValue"": ""properties.addOnAttributes.Organization""}, {""columnName"": ""Email"", ""columnValue"": ""properties.addOnAttributes.Email""}, {""columnName"": ""Endpoint"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Cisco Secure Endpoint Email"", ""placeholder"": ""Enter your Cisco Email"", ""type"": ""text"", ""name"": ""email"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Cisco Secure Endpoint Organization"", ""placeholder"": ""Enter the name of your Organization"", ""type"": ""text"", ""name"": ""organization"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Cisco Secure Endpoint Client ID"", ""placeholder"": ""Enter your Client ID"", ""type"": ""text"", ""name"": ""username"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Cisco Secure Endpoint API Key"", ""placeholder"": ""Enter your API Key"", ""type"": ""password"", ""name"": ""apiKey"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Cisco Secure Endpoint Region"", ""placeholder"": ""Enter the region you want to connect"", ""type"": ""text"", ""name"": ""region"", ""required"": true, ""description"": ""For example, if your region is https://api.apjc.amp.cisco.com then enter only apjc.amp in the above field. 
Follow the link provided in the Cisco Secure Endpoint API Credentials/Regions section for better understanding of the regions.""}}]}]}}], ""title"": ""Connect Cisco Secure Endpoint to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Cisco Secure Endpoint API Credentials/Regions"", ""description"": ""To create API Credentials and to understand the regions, follow the document link provided here. [Click here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Endpoint/Data%20Connectors/README.md).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20Secure%20Endpoint/Data%20Connectors/CiscoSecureEndpointLogs_ccp/CiscoSecureEndpointLogs_ConnectorDefinition.json","true" +"Syslog","Cisco UCS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20UCS","azuresentinel","azure-sentinel-solution-ciscoucs","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoUCS","Cisco","[Deprecated] Cisco UCS","The [Cisco Unified Computing System (UCS)](https://www.cisco.com/c/en/us/products/servers-unified-computing/index.html) connector allows you to easily connect your Cisco UCS logs with Microsoft Sentinel This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias CiscoUCS and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20UCS/Parsers/CiscoUCS.yaml). The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n 2. Select **Apply below configuration to my machines** and select the facilities and severities.\n 3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. 
Configure and connect the Cisco UCS"", ""description"": ""[Follow these instructions](https://www.cisco.com/c/en/us/support/docs/servers-unified-computing/ucs-manager/110265-setup-syslog-for-ucs.html#configsremotesyslog) to configure the Cisco UCS to forward syslog. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Cisco UCS"", ""description"": ""must be configured to export logs via Syslog""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cisco%20UCS/Data%20Connectors/Connector_Syslog_CiscoUCS.json","true" +"","CiscoASA","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoASA","azuresentinel","azure-sentinel-solution-ciscoasa","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"CiscoDuo_CL","CiscoDuoSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoDuoSecurity","cisco","duo-security-sentinel","2022-01-07","","","Cisco Systems","Partner","https://duo.com/support","","domains","CiscoDuoSecurity","Cisco","Cisco Duo Security","The Cisco Duo Security data connector provides the capability to ingest [authentication logs](https://duo.com/docs/adminapi#authentication-logs), [administrator logs](https://duo.com/docs/adminapi#administrator-logs), [telephony logs](https://duo.com/docs/adminapi#telephony-logs), [offline enrollment logs](https://duo.com/docs/adminapi#offline-enrollment-logs) and [Trust Monitor events](https://duo.com/docs/adminapi#trust-monitor) into Microsoft Sentinel using the Cisco Duo Admin API. 
Refer to [API documentation](https://duo.com/docs/adminapi) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Cisco Duo API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**CiscoDuo**](https://aka.ms/sentinel-CiscoDuoSecurity-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Obtaining Cisco Duo Admin API credentials**\n\n1. Follow [the instructions](https://duo.com/docs/adminapi#first-steps) to obtain **integration key**, **secret key**, and **API hostname**. 
Use **Grant read log** permission in the 4th step of [the instructions](https://duo.com/docs/adminapi#first-steps).""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Azure Blob Storage connection string and container name, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CiscoDuoSecurity-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CiscoDuoSecurity-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Cisco Duo Integration Key**, **Cisco Duo Secret Key**, **Cisco Duo API Hostname**, **Cisco Duo Log Types**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key**\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. 
Download the [Azure Function App](https://aka.ms/sentinel-CiscoDuoSecurity-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tCISCO_DUO_INTEGRATION_KEY\n\t\tCISCO_DUO_SECRET_KEY\n\t\tCISCO_DUO_API_HOSTNAME\n\t\tCISCO_DUO_LOG_TYPES\n\t\tWORKSPACE_ID\n\t\tSHARED_KEY\n\t\tlogAnalyticsUri (Optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. \n4. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Cisco Duo API credentials"", ""description"": ""Cisco Duo API credentials with permission *Grant read log* is required for Cisco Duo API. See the [documentation](https://duo.com/docs/adminapi#first-steps) to learn more about creating Cisco Duo API credentials.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoDuoSecurity/Data%20Connectors/CiscoDuo_API_FunctionApp.json","true" +"meraki_CL","CiscoMeraki","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoMeraki","azuresentinel","azure-sentinel-solution-ciscomeraki","2021-09-08","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoMeraki","Cisco","[Deprecated] Cisco Meraki","The [Cisco Meraki](https://meraki.cisco.com/) connector allows you to easily connect your Cisco Meraki (MX/MR/MS) logs with Microsoft Sentinel. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias CiscoMeraki and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoMeraki/Parsers/CiscoMeraki.txt). 
The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Follow the configuration steps below to get Cisco Meraki device logs into Microsoft Sentinel. Refer to the [Azure Monitor Documentation](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-json) for more details on these steps.\n For Cisco Meraki logs, we have issues while parsing the data by OMS agent data using default settings. \nSo we advice to capture the logs into custom table **meraki_CL** using below instructions. \n1. Login to the server where you have installed OMS agent.\n2. Download config file [meraki.conf](https://aka.ms/sentinel-ciscomerakioms-conf) \n\t\twget -v https://aka.ms/sentinel-ciscomerakioms-conf -O meraki.conf \n3. Copy meraki.conf to the /etc/opt/microsoft/omsagent/**workspace_id**/conf/omsagent.d/ folder. \n\t\tcp meraki.conf /etc/opt/microsoft/omsagent/<>/conf/omsagent.d/\n4. 
Edit meraki.conf as follows:\n\n\t a. meraki.conf uses the port **22033** by default. Ensure this port is not being used by any other source on your server\n\n\t b. If you would like to change the default port for **meraki.conf** make sure that you dont use default Azure monitoring /log analytic agent ports I.e.(For example CEF uses TCP port **25226** or **25224**) \n\n\t c. replace **workspace_id** with real value of your Workspace ID (lines 14,15,16,19)\n5. Save changes and restart the Azure Log Analytics agent for Linux service with the following command:\n\t\tsudo /opt/microsoft/omsagent/bin/service_control restart\n6. Modify /etc/rsyslog.conf file - add below template preferably at the beginning / before directives section \n\t\t$template meraki,\""%timestamp% %hostname% %msg%\\n\"" \n7. Create a custom conf file in /etc/rsyslog.d/ for example 10-meraki.conf and add following filter conditions.\n\n\t With an added statement you will need to create a filter which will specify the logs coming from the Cisco Meraki to be forwarded to the custom table.\n\n\t reference: [Filter Conditions \u2014 rsyslog 8.18.0.master documentation](https://rsyslog.readthedocs.io/en/latest/configuration/filters.html)\n\n\t Here is an example of filtering that can be defined, this is not complete and will require additional testing for each installation.\n\t\t if $rawmsg contains \""flows\"" then @@127.0.0.1:22033;meraki\n\t\t & stop\n\t\t if $rawmsg contains \""firewall\"" then @@127.0.0.1:22033;meraki\n\t\t & stop\n\t\t if $rawmsg contains \""urls\"" then @@127.0.0.1:22033;meraki\n\t\t & stop\n\t\t if $rawmsg contains \""ids-alerts\"" then @@127.0.0.1:22033;meraki\n\t\t & stop\n\t\t if $rawmsg contains \""events\"" then @@127.0.0.1:22033;meraki\n\t\t & stop\n\t\t if $rawmsg contains \""ip_flow_start\"" then @@127.0.0.1:22033;meraki\n\t\t & stop\n\t\t if $rawmsg contains \""ip_flow_end\"" then @@127.0.0.1:22033;meraki\n\t\t & stop \n8. 
Restart rsyslog\n\t\t systemctl restart rsyslog"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Configure and connect the Cisco Meraki device(s)"", ""description"": ""[Follow these instructions](https://documentation.meraki.com/General_Administration/Monitoring_and_Reporting/Meraki_Device_Reporting_-_Syslog%2C_SNMP_and_API) to configure the Cisco Meraki device(s) to forward syslog. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Cisco Meraki"", ""description"": ""must be configured to export logs via Syslog""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoMeraki/Data%20Connectors/Connector_Syslog_CiscoMeraki.json","true" +"CiscoMerakiNativePoller_CL","CiscoMeraki","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoMeraki","azuresentinel","azure-sentinel-solution-ciscomeraki","2021-09-08","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoMerakiNativePoller","Microsoft","Cisco Meraki (using REST API)","The [Cisco Meraki](https://aka.ms/ciscomeraki) connector allows you to easily connect your Cisco Meraki MX [security events](https://aka.ms/ciscomerakisecurityevents) to Microsoft Sentinel. 
The data connector uses [Cisco Meraki REST API](https://developer.cisco.com/meraki/api-v1/#!get-organization-appliance-security-events) to fetch security events and supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parse the received security event data into custom columns so that queries don't need to parse it again, thus resulting in better performance.

**Supported ASIM schema:**
1. Network Session","[{""title"": ""Connect Cisco Meraki Security Events to Microsoft Sentinel"", ""description"": ""To enable Cisco Meraki Security Events for Microsoft Sentinel, provide the required information below and click on Connect.\n>This data connector depends on a parser based on a Kusto Function to render the content. [**CiscoMeraki**](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/CiscoMeraki/Parsers/CiscoMeraki.txt) Parser currently support only \""**IDS Alert**\"" and \""**File Scanned**\"" Events."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Organization Id"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{organization}}""}], ""transformation"": {""transformationType"": ""predefinedTransformation"", ""outputStream"": ""[concat('Custom-', variables('streamName'))]"", ""dataCollectionRuleTemplateSpecName"": ""[variables('dataCollectionRuleId')]"", ""logAnalyticsTableTemplateSpecName"": ""[variables('logAnalyticsTableId')]""}}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Cisco Meraki REST API Key"", ""description"": ""Enable API access in Cisco Meraki and generate API Key. 
Please refer to Cisco Meraki official [documentation](https://aka.ms/ciscomerakiapikey) for more information.""}, {""name"": ""Cisco Meraki Organization Id"", ""description"": ""Obtain your Cisco Meraki organization id to fetch security events. Follow the steps in the [documentation](https://aka.ms/ciscomerakifindorg) to obtain the Organization Id using the Meraki API Key obtained in previous step.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoMeraki/Data%20Connectors/CiscoMerakiNativePollerConnector/azuredeploy_Cisco_Meraki_native_poller_connector.json","true" +"meraki_CL","CiscoMeraki","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoMeraki","azuresentinel","azure-sentinel-solution-ciscomeraki","2021-09-08","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoMerakiNativePoller","Microsoft","Cisco Meraki (using REST API)","The [Cisco Meraki](https://aka.ms/ciscomeraki) connector allows you to easily connect your Cisco Meraki MX [security events](https://aka.ms/ciscomerakisecurityevents) to Microsoft Sentinel. The data connector uses [Cisco Meraki REST API](https://developer.cisco.com/meraki/api-v1/#!get-organization-appliance-security-events) to fetch security events and supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security event data into a custom columns so that queries don't need to parse it again, thus resulting in better performance.

**Supported ASIM schema:**
1. Network Session","[{""title"": ""Connect Cisco Meraki Security Events to Microsoft Sentinel"", ""description"": ""To enable Cisco Meraki Security Events for Microsoft Sentinel, provide the required information below and click on Connect.\n>This data connector depends on a parser based on a Kusto Function to render the content. [**CiscoMeraki**](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/CiscoMeraki/Parsers/CiscoMeraki.txt) Parser currently support only \""**IDS Alert**\"" and \""**File Scanned**\"" Events."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Organization Id"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{organization}}""}], ""transformation"": {""transformationType"": ""predefinedTransformation"", ""outputStream"": ""[concat('Custom-', variables('streamName'))]"", ""dataCollectionRuleTemplateSpecName"": ""[variables('dataCollectionRuleId')]"", ""logAnalyticsTableTemplateSpecName"": ""[variables('logAnalyticsTableId')]""}}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Cisco Meraki REST API Key"", ""description"": ""Enable API access in Cisco Meraki and generate API Key. 
Please refer to Cisco Meraki official [documentation](https://aka.ms/ciscomerakiapikey) for more information.""}, {""name"": ""Cisco Meraki Organization Id"", ""description"": ""Obtain your Cisco Meraki organization id to fetch security events. Follow the steps in the [documentation](https://aka.ms/ciscomerakifindorg) to obtain the Organization Id using the Meraki API Key obtained in previous step.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoMeraki/Data%20Connectors/CiscoMerakiNativePollerConnector/azuredeploy_Cisco_Meraki_native_poller_connector.json","true" +"CommonSecurityLog","CiscoSEG","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoSEG","azuresentinel","azure-sentinel-solution-ciscoseg","2021-06-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoSEG","Cisco","[Deprecated] Cisco Secure Email Gateway via Legacy Agent","The [Cisco Secure Email Gateway (SEG)](https://www.cisco.com/c/en/us/products/security/email-security/index.html) data connector provides the capability to ingest [Cisco SEG Consolidated Event Logs](https://www.cisco.com/c/en/us/td/docs/security/esa/esa14-0/user_guide/b_ESA_Admin_Guide_14-0/b_ESA_Admin_Guide_12_1_chapter_0100111.html#con_1061902) into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**CiscoSEGEvent**](https://aka.ms/sentinel-CiscoSEG-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using AsyncOS 14.0 for Cisco Secure Email Gateway"", ""instructions"": []}, {""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Follow these steps to configure Cisco Secure Email Gateway to forward logs via syslog:\n\n2.1. Configure [Log Subscription](https://www.cisco.com/c/en/us/td/docs/security/esa/esa14-0/user_guide/b_ESA_Admin_Guide_14-0/b_ESA_Admin_Guide_12_1_chapter_0100111.html#con_1134718)\n\n>**NOTE:** Select **Consolidated Event Logs** in Log Type field.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoSEG/Data%20Connectors/Connector_Cisco_SEG_CEF.json","true" +"CommonSecurityLog","CiscoSEG","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoSEG","azuresentinel","azure-sentinel-solution-ciscoseg","2021-06-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoSEGAma","Cisco","[Deprecated] Cisco Secure Email Gateway via AMA","The [Cisco Secure Email Gateway (SEG)](https://www.cisco.com/c/en/us/products/security/email-security/index.html) data connector provides the capability to ingest [Cisco SEG Consolidated Event Logs](https://www.cisco.com/c/en/us/td/docs/security/esa/esa14-0/user_guide/b_ESA_Admin_Guide_14-0/b_ESA_Admin_Guide_12_1_chapter_0100111.html#con_1061902) into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**CiscoSEGEvent**](https://aka.ms/sentinel-CiscoSEG-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Follow these steps to configure Cisco Secure Email Gateway to forward logs via syslog:\n\n Configure [Log Subscription](https://www.cisco.com/c/en/us/td/docs/security/esa/esa14-0/user_guide/b_ESA_Admin_Guide_14-0/b_ESA_Admin_Guide_12_1_chapter_0100111.html#con_1134718)\n\n>**NOTE:** Select **Consolidated Event Logs** in Log Type field."", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoSEG/Data%20Connectors/template_CiscoSEGAMA.json","true" +"Cisco_Umbrella_audit_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [Cisco Cloud Security log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cisco Cloud Security logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Cloud Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": 
[{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Cloud Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Cloud Security data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" +"Cisco_Umbrella_cloudfirewall_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [Cisco Cloud Security log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cisco Cloud Security logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Cloud Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": 
[{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Cloud Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Cloud Security data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" +"Cisco_Umbrella_dlp_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [Cisco Cloud Security log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cisco Cloud Security logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Cloud Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": 
[{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Cloud Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Cloud Security data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" +"Cisco_Umbrella_dns_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [Cisco Cloud Security log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cisco Cloud Security logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Cloud Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": 
[{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Cloud Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Cloud Security data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" +"Cisco_Umbrella_fileevent_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [Cisco Cloud Security log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cisco Cloud Security logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Cloud Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": 
[{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Cloud Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Cloud Security data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" +"Cisco_Umbrella_firewall_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [Cisco Cloud Security log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cisco Cloud Security logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Cloud Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": 
[{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Cloud Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Cloud Security data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" +"Cisco_Umbrella_intrusion_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [Cisco Cloud Security log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cisco Cloud Security logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Cloud Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": 
[{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Cloud Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Cloud Security data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" +"Cisco_Umbrella_ip_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [Cisco Cloud Security log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cisco Cloud Security logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Cloud Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": 
[{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Cloud Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Cloud Security data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" +"Cisco_Umbrella_proxy_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [Cisco Cloud Security log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cisco Cloud Security logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Cloud Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": 
[{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Cloud Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Cloud Security data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" +"Cisco_Umbrella_ravpnlogs_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [Cisco Cloud Security log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cisco Cloud Security logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Cloud Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": 
[{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Cloud Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Cloud Security data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" +"Cisco_Umbrella_ztaflow_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [Cisco Cloud Security log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cisco Cloud Security logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Cloud Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": 
[{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Cloud Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Cloud Security data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" +"Cisco_Umbrella_ztna_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnector","Cisco","Cisco Cloud Security","The Cisco Cloud Security solution for Microsoft Sentinel enables you to ingest [Cisco Secure Access](https://docs.sse.cisco.com/sse-user-guide/docs/welcome-cisco-secure-access) and [Cisco Umbrella](https://docs.umbrella.com/umbrella-user-guide/docs/getting-started) [logs](https://docs.sse.cisco.com/sse-user-guide/docs/manage-your-logs) stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Cloud Security log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [Cisco Cloud Security log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cisco Cloud Security logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Cloud Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": 
[{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Cloud Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Cloud Security data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp.json","true" +"Cisco_Umbrella_audit_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [cisco umbrella log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. 
[Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Network Prerequisites for Private Access**\n\n>**IMPORTANT:** When deploying with private storage account access, ensure the following network prerequisites are met:\n> - **Virtual Network**: An existing Virtual Network (VNet) must be available\n> - **Subnet**: A dedicated subnet within the VNet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration\n> - **Subnet Delegation**: Configure the subnet delegation using Azure Portal, ARM template, or Azure CLI:\n> - Azure Portal: Go to Virtual networks \u2192 Select your VNet \u2192 Subnets \u2192 Select subnet \u2192 Delegate subnet to service \u2192 Choose **Microsoft.Web/serverFarms**\n> - Azure CLI: `az network vnet subnet update --resource-group --vnet-name --name --delegations Microsoft.Web/serverFarms`\n> - **Private Endpoints**: The deployment will create private endpoints for storage account services (blob, file, queue, table) within the same subnet""}, {""title"": """", ""description"": ""**STEP 2 - Configuration of the Cisco Umbrella logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Umbrella data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": 
""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Umbrella data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n4. **For Private Access Deployment**: Also enter **existingVnetName**, **existingVnetResourceGroupName**, and **existingSubnetName** (ensure subnet is delegated to Microsoft.Web/serverFarms)\n**Note:** For the S3Bucket use the value that Cisco referrs to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n5. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n6. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Umbrella data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. 
Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}, {""name"": ""Virtual Network permissions (for private access)"", ""description"": ""For private storage account access, **Network Contributor** permissions are required on the Virtual Network and subnet. The subnet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" +"Cisco_Umbrella_cloudfirewall_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [cisco umbrella log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. 
[Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Network Prerequisites for Private Access**\n\n>**IMPORTANT:** When deploying with private storage account access, ensure the following network prerequisites are met:\n> - **Virtual Network**: An existing Virtual Network (VNet) must be available\n> - **Subnet**: A dedicated subnet within the VNet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration\n> - **Subnet Delegation**: Configure the subnet delegation using Azure Portal, ARM template, or Azure CLI:\n> - Azure Portal: Go to Virtual networks \u2192 Select your VNet \u2192 Subnets \u2192 Select subnet \u2192 Delegate subnet to service \u2192 Choose **Microsoft.Web/serverFarms**\n> - Azure CLI: `az network vnet subnet update --resource-group --vnet-name --name --delegations Microsoft.Web/serverFarms`\n> - **Private Endpoints**: The deployment will create private endpoints for storage account services (blob, file, queue, table) within the same subnet""}, {""title"": """", ""description"": ""**STEP 2 - Configuration of the Cisco Umbrella logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Umbrella data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": 
""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Umbrella data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n4. **For Private Access Deployment**: Also enter **existingVnetName**, **existingVnetResourceGroupName**, and **existingSubnetName** (ensure subnet is delegated to Microsoft.Web/serverFarms)\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n5. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n6. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Umbrella data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. 
Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}, {""name"": ""Virtual Network permissions (for private access)"", ""description"": ""For private storage account access, **Network Contributor** permissions are required on the Virtual Network and subnet. The subnet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" +"Cisco_Umbrella_dlp_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [cisco umbrella log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. 
[Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Network Prerequisites for Private Access**\n\n>**IMPORTANT:** When deploying with private storage account access, ensure the following network prerequisites are met:\n> - **Virtual Network**: An existing Virtual Network (VNet) must be available\n> - **Subnet**: A dedicated subnet within the VNet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration\n> - **Subnet Delegation**: Configure the subnet delegation using Azure Portal, ARM template, or Azure CLI:\n> - Azure Portal: Go to Virtual networks \u2192 Select your VNet \u2192 Subnets \u2192 Select subnet \u2192 Delegate subnet to service \u2192 Choose **Microsoft.Web/serverFarms**\n> - Azure CLI: `az network vnet subnet update --resource-group --vnet-name --name --delegations Microsoft.Web/serverFarms`\n> - **Private Endpoints**: The deployment will create private endpoints for storage account services (blob, file, queue, table) within the same subnet""}, {""title"": """", ""description"": ""**STEP 2 - Configuration of the Cisco Umbrella logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Umbrella data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": 
""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Umbrella data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n4. **For Private Access Deployment**: Also enter **existingVnetName**, **existingVnetResourceGroupName**, and **existingSubnetName** (ensure subnet is delegated to Microsoft.Web/serverFarms)\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n5. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n6. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Umbrella data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. 
Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}, {""name"": ""Virtual Network permissions (for private access)"", ""description"": ""For private storage account access, **Network Contributor** permissions are required on the Virtual Network and subnet. The subnet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" +"Cisco_Umbrella_dns_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [cisco umbrella log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. 
[Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Network Prerequisites for Private Access**\n\n>**IMPORTANT:** When deploying with private storage account access, ensure the following network prerequisites are met:\n> - **Virtual Network**: An existing Virtual Network (VNet) must be available\n> - **Subnet**: A dedicated subnet within the VNet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration\n> - **Subnet Delegation**: Configure the subnet delegation using Azure Portal, ARM template, or Azure CLI:\n> - Azure Portal: Go to Virtual networks \u2192 Select your VNet \u2192 Subnets \u2192 Select subnet \u2192 Delegate subnet to service \u2192 Choose **Microsoft.Web/serverFarms**\n> - Azure CLI: `az network vnet subnet update --resource-group --vnet-name --name --delegations Microsoft.Web/serverFarms`\n> - **Private Endpoints**: The deployment will create private endpoints for storage account services (blob, file, queue, table) within the same subnet""}, {""title"": """", ""description"": ""**STEP 2 - Configuration of the Cisco Umbrella logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Umbrella data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": 
""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Umbrella data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n4. **For Private Access Deployment**: Also enter **existingVnetName**, **existingVnetResourceGroupName**, and **existingSubnetName** (ensure subnet is delegated to Microsoft.Web/serverFarms)\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n5. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n6. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Umbrella data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. 
Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}, {""name"": ""Virtual Network permissions (for private access)"", ""description"": ""For private storage account access, **Network Contributor** permissions are required on the Virtual Network and subnet. The subnet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" +"Cisco_Umbrella_fileevent_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [cisco umbrella log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. 
[Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Network Prerequisites for Private Access**\n\n>**IMPORTANT:** When deploying with private storage account access, ensure the following network prerequisites are met:\n> - **Virtual Network**: An existing Virtual Network (VNet) must be available\n> - **Subnet**: A dedicated subnet within the VNet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration\n> - **Subnet Delegation**: Configure the subnet delegation using Azure Portal, ARM template, or Azure CLI:\n> - Azure Portal: Go to Virtual networks \u2192 Select your VNet \u2192 Subnets \u2192 Select subnet \u2192 Delegate subnet to service \u2192 Choose **Microsoft.Web/serverFarms**\n> - Azure CLI: `az network vnet subnet update --resource-group --vnet-name --name --delegations Microsoft.Web/serverFarms`\n> - **Private Endpoints**: The deployment will create private endpoints for storage account services (blob, file, queue, table) within the same subnet""}, {""title"": """", ""description"": ""**STEP 2 - Configuration of the Cisco Umbrella logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Umbrella data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": 
""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Umbrella data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n4. **For Private Access Deployment**: Also enter **existingVnetName**, **existingVnetResourceGroupName**, and **existingSubnetName** (ensure subnet is delegated to Microsoft.Web/serverFarms)\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n5. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n6. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Umbrella data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. 
Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}, {""name"": ""Virtual Network permissions (for private access)"", ""description"": ""For private storage account access, **Network Contributor** permissions are required on the Virtual Network and subnet. The subnet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" +"Cisco_Umbrella_firewall_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [cisco umbrella log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. 
[Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Network Prerequisites for Private Access**\n\n>**IMPORTANT:** When deploying with private storage account access, ensure the following network prerequisites are met:\n> - **Virtual Network**: An existing Virtual Network (VNet) must be available\n> - **Subnet**: A dedicated subnet within the VNet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration\n> - **Subnet Delegation**: Configure the subnet delegation using Azure Portal, ARM template, or Azure CLI:\n> - Azure Portal: Go to Virtual networks \u2192 Select your VNet \u2192 Subnets \u2192 Select subnet \u2192 Delegate subnet to service \u2192 Choose **Microsoft.Web/serverFarms**\n> - Azure CLI: `az network vnet subnet update --resource-group --vnet-name --name --delegations Microsoft.Web/serverFarms`\n> - **Private Endpoints**: The deployment will create private endpoints for storage account services (blob, file, queue, table) within the same subnet""}, {""title"": """", ""description"": ""**STEP 2 - Configuration of the Cisco Umbrella logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Umbrella data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": 
""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Umbrella data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n4. **For Private Access Deployment**: Also enter **existingVnetName**, **existingVnetResourceGroupName**, and **existingSubnetName** (ensure subnet is delegated to Microsoft.Web/serverFarms)\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n5. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n6. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Umbrella data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. 
Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}, {""name"": ""Virtual Network permissions (for private access)"", ""description"": ""For private storage account access, **Network Contributor** permissions are required on the Virtual Network and subnet. The subnet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" +"Cisco_Umbrella_intrusion_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [cisco umbrella log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. 
[Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Network Prerequisites for Private Access**\n\n>**IMPORTANT:** When deploying with private storage account access, ensure the following network prerequisites are met:\n> - **Virtual Network**: An existing Virtual Network (VNet) must be available\n> - **Subnet**: A dedicated subnet within the VNet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration\n> - **Subnet Delegation**: Configure the subnet delegation using Azure Portal, ARM template, or Azure CLI:\n> - Azure Portal: Go to Virtual networks \u2192 Select your VNet \u2192 Subnets \u2192 Select subnet \u2192 Delegate subnet to service \u2192 Choose **Microsoft.Web/serverFarms**\n> - Azure CLI: `az network vnet subnet update --resource-group --vnet-name --name --delegations Microsoft.Web/serverFarms`\n> - **Private Endpoints**: The deployment will create private endpoints for storage account services (blob, file, queue, table) within the same subnet""}, {""title"": """", ""description"": ""**STEP 2 - Configuration of the Cisco Umbrella logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Umbrella data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": 
""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Umbrella data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n4. **For Private Access Deployment**: Also enter **existingVnetName**, **existingVnetResourceGroupName**, and **existingSubnetName** (ensure subnet is delegated to Microsoft.Web/serverFarms)\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n5. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n6. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Umbrella data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. 
Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}, {""name"": ""Virtual Network permissions (for private access)"", ""description"": ""For private storage account access, **Network Contributor** permissions are required on the Virtual Network and subnet. The subnet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" +"Cisco_Umbrella_ip_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [cisco umbrella log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. 
[Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Network Prerequisites for Private Access**\n\n>**IMPORTANT:** When deploying with private storage account access, ensure the following network prerequisites are met:\n> - **Virtual Network**: An existing Virtual Network (VNet) must be available\n> - **Subnet**: A dedicated subnet within the VNet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration\n> - **Subnet Delegation**: Configure the subnet delegation using Azure Portal, ARM template, or Azure CLI:\n> - Azure Portal: Go to Virtual networks \u2192 Select your VNet \u2192 Subnets \u2192 Select subnet \u2192 Delegate subnet to service \u2192 Choose **Microsoft.Web/serverFarms**\n> - Azure CLI: `az network vnet subnet update --resource-group --vnet-name --name --delegations Microsoft.Web/serverFarms`\n> - **Private Endpoints**: The deployment will create private endpoints for storage account services (blob, file, queue, table) within the same subnet""}, {""title"": """", ""description"": ""**STEP 2 - Configuration of the Cisco Umbrella logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Umbrella data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": 
""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Umbrella data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n4. **For Private Access Deployment**: Also enter **existingVnetName**, **existingVnetResourceGroupName**, and **existingSubnetName** (ensure subnet is delegated to Microsoft.Web/serverFarms)\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n5. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n6. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Umbrella data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. 
Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}, {""name"": ""Virtual Network permissions (for private access)"", ""description"": ""For private storage account access, **Network Contributor** permissions are required on the Virtual Network and subnet. The subnet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" +"Cisco_Umbrella_proxy_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [cisco umbrella log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. 
[Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Network Prerequisites for Private Access**\n\n>**IMPORTANT:** When deploying with private storage account access, ensure the following network prerequisites are met:\n> - **Virtual Network**: An existing Virtual Network (VNet) must be available\n> - **Subnet**: A dedicated subnet within the VNet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration\n> - **Subnet Delegation**: Configure the subnet delegation using Azure Portal, ARM template, or Azure CLI:\n> - Azure Portal: Go to Virtual networks \u2192 Select your VNet \u2192 Subnets \u2192 Select subnet \u2192 Delegate subnet to service \u2192 Choose **Microsoft.Web/serverFarms**\n> - Azure CLI: `az network vnet subnet update --resource-group --vnet-name --name --delegations Microsoft.Web/serverFarms`\n> - **Private Endpoints**: The deployment will create private endpoints for storage account services (blob, file, queue, table) within the same subnet""}, {""title"": """", ""description"": ""**STEP 2 - Configuration of the Cisco Umbrella logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Umbrella data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": 
""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Umbrella data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n4. **For Private Access Deployment**: Also enter **existingVnetName**, **existingVnetResourceGroupName**, and **existingSubnetName** (ensure subnet is delegated to Microsoft.Web/serverFarms)\n**Note:** For the S3Bucket use the value that Cisco referrs to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n5. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n6. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Umbrella data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. 
Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}, {""name"": ""Virtual Network permissions (for private access)"", ""description"": ""For private storage account access, **Network Contributor** permissions are required on the Virtual Network and subnet. The subnet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" +"Cisco_Umbrella_ravpnlogs_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [cisco umbrella log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. 
[Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Network Prerequisites for Private Access**\n\n>**IMPORTANT:** When deploying with private storage account access, ensure the following network prerequisites are met:\n> - **Virtual Network**: An existing Virtual Network (VNet) must be available\n> - **Subnet**: A dedicated subnet within the VNet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration\n> - **Subnet Delegation**: Configure the subnet delegation using Azure Portal, ARM template, or Azure CLI:\n> - Azure Portal: Go to Virtual networks \u2192 Select your VNet \u2192 Subnets \u2192 Select subnet \u2192 Delegate subnet to service \u2192 Choose **Microsoft.Web/serverFarms**\n> - Azure CLI: `az network vnet subnet update --resource-group --vnet-name --name --delegations Microsoft.Web/serverFarms`\n> - **Private Endpoints**: The deployment will create private endpoints for storage account services (blob, file, queue, table) within the same subnet""}, {""title"": """", ""description"": ""**STEP 2 - Configuration of the Cisco Umbrella logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Umbrella data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": 
""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Umbrella data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n4. **For Private Access Deployment**: Also enter **existingVnetName**, **existingVnetResourceGroupName**, and **existingSubnetName** (ensure subnet is delegated to Microsoft.Web/serverFarms)\n**Note:** For the S3Bucket use the value that Cisco referrs to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n5. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n6. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Umbrella data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. 
Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}, {""name"": ""Virtual Network permissions (for private access)"", ""description"": ""For private storage account access, **Network Contributor** permissions are required on the Virtual Network and subnet. The subnet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" +"Cisco_Umbrella_ztaflow_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [cisco umbrella log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. 
[Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Network Prerequisites for Private Access**\n\n>**IMPORTANT:** When deploying with private storage account access, ensure the following network prerequisites are met:\n> - **Virtual Network**: An existing Virtual Network (VNet) must be available\n> - **Subnet**: A dedicated subnet within the VNet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration\n> - **Subnet Delegation**: Configure the subnet delegation using Azure Portal, ARM template, or Azure CLI:\n> - Azure Portal: Go to Virtual networks \u2192 Select your VNet \u2192 Subnets \u2192 Select subnet \u2192 Delegate subnet to service \u2192 Choose **Microsoft.Web/serverFarms**\n> - Azure CLI: `az network vnet subnet update --resource-group --vnet-name --name --delegations Microsoft.Web/serverFarms`\n> - **Private Endpoints**: The deployment will create private endpoints for storage account services (blob, file, queue, table) within the same subnet""}, {""title"": """", ""description"": ""**STEP 2 - Configuration of the Cisco Umbrella logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Umbrella data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": 
""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Umbrella data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n4. **For Private Access Deployment**: Also enter **existingVnetName**, **existingVnetResourceGroupName**, and **existingSubnetName** (ensure subnet is delegated to Microsoft.Web/serverFarms)\n**Note:** For the S3Bucket use the value that Cisco referrs to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n5. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n6. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Umbrella data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. 
Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}, {""name"": ""Virtual Network permissions (for private access)"", ""description"": ""For private storage account access, **Network Contributor** permissions are required on the Virtual Network and subnet. The subnet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" +"Cisco_Umbrella_ztna_CL","CiscoUmbrella","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella","azuresentinel","azure-sentinel-solution-ciscoumbrella","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","CiscoUmbrellaDataConnectorelasticpremium","Cisco","Cisco Cloud Security (using elastic premium plan)","The Cisco Umbrella data connector provides the capability to ingest [Cisco Umbrella](https://docs.umbrella.com/) events stored in Amazon S3 into Microsoft Sentinel using the Amazon S3 REST API. Refer to [Cisco Umbrella log management documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management) for more information.

**NOTE:** This data connector uses the [Azure Functions Premium Plan](https://learn.microsoft.com/azure/azure-functions/functions-premium-plan?tabs=portal) to enable secure ingestion capabilities and will incur additional costs. More pricing details are [here](https://azure.microsoft.com/pricing/details/functions/?msockid=2f4366822d836a7c2ac673462cfc6ba8#pricing).","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Amazon S3 REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated to support [cisco umbrella log schema version 14.](https://docs.umbrella.com/deployment-umbrella/docs/log-formats-and-versioning)""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. 
[Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Parsers/CiscoUmbrella/Cisco_Umbrella) to create the Kusto function alias **Cisco_Umbrella**.""}, {""title"": """", ""description"": ""**STEP 1 - Network Prerequisites for Private Access**\n\n>**IMPORTANT:** When deploying with private storage account access, ensure the following network prerequisites are met:\n> - **Virtual Network**: An existing Virtual Network (VNet) must be available\n> - **Subnet**: A dedicated subnet within the VNet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration\n> - **Subnet Delegation**: Configure the subnet delegation using Azure Portal, ARM template, or Azure CLI:\n> - Azure Portal: Go to Virtual networks \u2192 Select your VNet \u2192 Subnets \u2192 Select subnet \u2192 Delegate subnet to service \u2192 Choose **Microsoft.Web/serverFarms**\n> - Azure CLI: `az network vnet subnet update --resource-group --vnet-name --name --delegations Microsoft.Web/serverFarms`\n> - **Private Endpoints**: The deployment will create private endpoints for storage account services (blob, file, queue, table) within the same subnet""}, {""title"": """", ""description"": ""**STEP 2 - Configuration of the Cisco Umbrella logs collection**\n\n[See documentation](https://docs.umbrella.com/deployment-umbrella/docs/log-management#section-logging-to-amazon-s-3) and follow the instructions for set up logging and obtain credentials.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Cisco Umbrella data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Amazon S3 REST API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": 
""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cisco Umbrella data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelciscoumbrellaelasticpremiumazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **S3Bucket**, **AWSAccessKeyId**, **AWSSecretAccessKey**\n4. **For Private Access Deployment**: Also enter **existingVnetName**, **existingVnetResourceGroupName**, and **existingSubnetName** (ensure subnet is delegated to Microsoft.Web/serverFarms)\n**Note:** For the S3Bucket use the value that Cisco refers to as the _S3 Bucket Data Path_ and add a / (forward slash) to the end of the value\n5. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n6. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cisco Umbrella data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure Functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-CiscoUmbrellaConn-functionapp) file. 
Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeploymentWithPythonVersion3.9.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tS3Bucket\n\t\tAWSAccessKeyId\n\t\tAWSSecretAccessKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name** are required for Amazon S3 REST API.""}, {""name"": ""Virtual Network permissions (for private access)"", ""description"": ""For private storage account access, **Network Contributor** permissions are required on the Virtual Network and subnet. The subnet must be delegated to **Microsoft.Web/serverFarms** for Function App VNet integration.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoUmbrella/Data%20Connectors/CiscoUmbrella_API_FunctionApp_elasticpremium.json","true" +"Syslog","CiscoWSA","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoWSA","azuresentinel","azure-sentinel-solution-ciscowsa","2021-06-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CiscoWSA","Cisco","[Deprecated] Cisco Web Security Appliance","[Cisco Web Security Appliance (WSA)](https://www.cisco.com/c/en/us/products/security/web-security-appliance/index.html) data connector provides the capability to ingest [Cisco WSA Access Logs](https://www.cisco.com/c/en/us/td/docs/security/wsa/wsa_14-0/User-Guide/b_WSA_UserGuide_14_0/b_WSA_UserGuide_11_7_chapter_010101.html) into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as 
expected [**CiscoWSAEvent**](https://aka.ms/sentinel-CiscoWSA-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using AsyncOS 14.0 for Cisco Web Security Appliance"", ""instructions"": []}, {""title"": ""1. Configure Cisco Web Security Appliance to forward logs via Syslog to remote server where you will install the agent."", ""description"": ""[Follow these steps](https://www.cisco.com/c/en/us/td/docs/security/esa/esa14-0/user_guide/b_ESA_Admin_Guide_14-0/b_ESA_Admin_Guide_12_1_chapter_0100111.html#con_1134718) to configure Cisco Web Security Appliance to forward logs via Syslog\n\n>**NOTE:** Select **Syslog Push** as a Retrieval Method.""}, {""title"": ""2. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server to which the logs will be forwarded.\n\n> Logs on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click 
**Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""3. Check logs in Microsoft Sentinel"", ""description"": ""Open Log Analytics to check if the logs are received using the Syslog schema.\n\n>**NOTE:** It may take up to 15 minutes before new logs will appear in Syslog table."", ""instructions"": []}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CiscoWSA/Data%20Connectors/Connector_WSA_Syslog.json","true" +"Syslog","Citrix ADC","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20ADC","azuresentinel","azure-sentinel-solution-citrixadc","2022-06-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CitrixADC","Citrix","[Deprecated] Citrix ADC (former NetScaler)","The [Citrix ADC (former NetScaler)](https://www.citrix.com/products/citrix-adc/) data connector provides the capability to ingest Citrix ADC logs into Microsoft Sentinel. If you want to ingest Citrix WAF logs into Microsoft Sentinel, refer this [documentation](https://learn.microsoft.com/azure/sentinel/data-connectors/citrix-waf-web-app-firewall)","[{""title"": """", ""description"": "">**NOTE:** 1. This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias CitrixADCEvent and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20ADC/Parsers/CitrixADCEvent.yaml), this function maps Citrix ADC (former NetScaler) events to Advanced Security Information Model [ASIM](https://docs.microsoft.com/azure/sentinel/normalization). The function usually takes 10-15 minutes to activate after solution installation/update. \n\n>**NOTE:** 2. This parser requires a watchlist named **`Sources_by_SourceType`** \n\n> i. 
If you don't have watchlist already created, please click [here](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FASIM%2Fdeploy%2FWatchlists%2FASimSourceType.json) to create. \n\n> ii. Open watchlist **`Sources_by_SourceType`** and add entries for this data source.\n\n> iii. The SourceType value for CitrixADC is **`CitrixADC`**. \n\n> You can refer [this](https://learn.microsoft.com/en-us/azure/sentinel/normalization-manage-parsers?WT.mc_id=Portal-fx#configure-the-sources-relevant-to-a-source-specific-parser) documentation for more details"", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n 2. Select **Apply below configuration to my machines** and select the facilities and severities.\n 3. 
Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Configure Citrix ADC to forward logs via Syslog"", ""description"": ""3.1 Navigate to **Configuration tab > System > Auditing > Syslog > Servers tab**\n\n 3.2 Specify **Syslog action name**.\n\n 3.3 Set IP address of remote Syslog server and port.\n\n 3.4 Set **Transport type** as **TCP** or **UDP** depending on your remote Syslog server configuration.\n\n 3.5 You can refer Citrix ADC (former NetScaler) [documentation](https://docs.netscaler.com/) for more details.""}, {""title"": ""4. Check logs in Microsoft Sentinel"", ""description"": ""Open Log Analytics to check if the logs are received using the Syslog schema.\n\n>**NOTE:** It may take up to 15 minutes before new logs will appear in Syslog table."", ""instructions"": []}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20ADC/Data%20Connectors/Connector_CitrixADC_syslog.json","true" +"CitrixAnalytics_indicatorEventDetails_CL","Citrix Analytics for Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Analytics%20for%20Security","citrix","citrix_analytics_for_security_mss","2022-05-06","","","Citrix Systems","Partner","https://www.citrix.com/support/","","domains","Citrix","CITRIX","CITRIX SECURITY ANALYTICS","Citrix Analytics (Security) integration with Microsoft Sentinel helps you to export data analyzed for risky events from Citrix Analytics (Security) into Microsoft Sentinel environment. 
You can create custom dashboards, analyze data from other sources along with that from Citrix Analytics (Security) and create custom workflows using Logic Apps to monitor and mitigate security events.","[{""title"": """", ""description"": ""To get access to this capability and the configuration steps on Citrix Analytics, please visit: [Connect Citrix to Microsoft Sentinel.](https://aka.ms/Sentinel-Citrix-Connector)\u200b\n"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Licensing"", ""description"": ""Entitlements to Citrix Security Analytics in Citrix Cloud. 
Please review [Citrix Tool License Agreement.](https://aka.ms/sentinel-citrixanalyticslicense-readme)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Analytics%20for%20Security/Data%20Connectors/CitrixSecurityAnalytics.json","true" +"CitrixAnalytics_indicatorSummary_CL","Citrix Analytics for Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Analytics%20for%20Security","citrix","citrix_analytics_for_security_mss","2022-05-06","","","Citrix Systems","Partner","https://www.citrix.com/support/","","domains","Citrix","CITRIX","CITRIX SECURITY ANALYTICS","Citrix Analytics (Security) integration with Microsoft Sentinel helps you to export data analyzed for risky events from Citrix Analytics (Security) into Microsoft Sentinel environment. You can create custom dashboards, analyze data from other sources along with that from Citrix Analytics (Security) and create custom workflows using Logic Apps to monitor and mitigate security events.","[{""title"": """", ""description"": ""To get access to this capability and the configuration steps on Citrix Analytics, please visit: [Connect Citrix to Microsoft Sentinel.](https://aka.ms/Sentinel-Citrix-Connector)\u200b\n"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Licensing"", ""description"": ""Entitlements to Citrix Security Analytics in Citrix Cloud. Please review [Citrix Tool License Agreement.](https://aka.ms/sentinel-citrixanalyticslicense-readme)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Analytics%20for%20Security/Data%20Connectors/CitrixSecurityAnalytics.json","true" +"CitrixAnalytics_riskScoreChange_CL","Citrix Analytics for Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Analytics%20for%20Security","citrix","citrix_analytics_for_security_mss","2022-05-06","","","Citrix Systems","Partner","https://www.citrix.com/support/","","domains","Citrix","CITRIX","CITRIX SECURITY ANALYTICS","Citrix Analytics (Security) integration with Microsoft Sentinel helps you to export data analyzed for risky events from Citrix Analytics (Security) into Microsoft Sentinel environment. 
You can create custom dashboards, analyze data from other sources along with that from Citrix Analytics (Security) and create custom workflows using Logic Apps to monitor and mitigate security events.","[{""title"": """", ""description"": ""To get access to this capability and the configuration steps on Citrix Analytics, please visit: [Connect Citrix to Microsoft Sentinel.](https://aka.ms/Sentinel-Citrix-Connector)\u200b\n"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Licensing"", ""description"": ""Entitlements to Citrix Security Analytics in Citrix Cloud. 
Please review [Citrix Tool License Agreement.](https://aka.ms/sentinel-citrixanalyticslicense-readme)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Analytics%20for%20Security/Data%20Connectors/CitrixSecurityAnalytics.json","true" +"CitrixAnalytics_userProfile_CL","Citrix Analytics for Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Analytics%20for%20Security","citrix","citrix_analytics_for_security_mss","2022-05-06","","","Citrix Systems","Partner","https://www.citrix.com/support/","","domains","Citrix","CITRIX","CITRIX SECURITY ANALYTICS","Citrix Analytics (Security) integration with Microsoft Sentinel helps you to export data analyzed for risky events from Citrix Analytics (Security) into Microsoft Sentinel environment. You can create custom dashboards, analyze data from other sources along with that from Citrix Analytics (Security) and create custom workflows using Logic Apps to monitor and mitigate security events.","[{""title"": """", ""description"": ""To get access to this capability and the configuration steps on Citrix Analytics, please visit: [Connect Citrix to Microsoft Sentinel.](https://aka.ms/Sentinel-Citrix-Connector)\u200b\n"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Licensing"", ""description"": ""Entitlements to Citrix Security Analytics in Citrix Cloud. Please review [Citrix Tool License Agreement.](https://aka.ms/sentinel-citrixanalyticslicense-readme)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Analytics%20for%20Security/Data%20Connectors/CitrixSecurityAnalytics.json","true" +"CommonSecurityLog","Citrix Web App Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Web%20App%20Firewall","citrix","citrix_waf_mss","2022-05-06","","","Citrix Systems","Partner","https://www.citrix.com/support/","","domains","CitrixWAF","Citrix Systems Inc.","[Deprecated] Citrix WAF (Web App Firewall) via Legacy Agent"," Citrix WAF (Web App Firewall) is an industry leading enterprise-grade WAF solution. Citrix WAF mitigates threats against your public-facing assets, including websites, apps, and APIs. From layer 3 to layer 7, Citrix WAF includes protections such as IP reputation, bot mitigation, defense against the OWASP Top 10 application threats, built-in signatures to protect against application stack vulnerabilities, and more.

Citrix WAF supports Common Event Format (CEF) which is an industry standard format on top of Syslog messages . By connecting Citrix WAF CEF logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Configure Citrix WAF to send Syslog messages in CEF format to the proxy machine using the steps below. \n\n1. 
Follow [this guide](https://support.citrix.com/article/CTX234174) to configure WAF.\n\n2. Follow [this guide](https://support.citrix.com/article/CTX136146) to configure CEF logs.\n\n3. Follow [this guide](https://docs.citrix.com/en-us/citrix-adc/13/system/audit-logging/configuring-audit-logging.html) to forward the logs to proxy . Make sure you to send the logs to port 514 TCP on the Linux machine's IP address.\n\n""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Web%20App%20Firewall/Data%20Connectors/Citrix_WAF.json","true" +"CommonSecurityLog","Citrix Web App Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Web%20App%20Firewall","citrix","citrix_waf_mss","2022-05-06","","","Citrix Systems","Partner","https://www.citrix.com/support/","","domains","CitrixWAFAma","Citrix Systems Inc.","[Deprecated] Citrix WAF (Web App Firewall) via AMA"," Citrix WAF (Web App Firewall) is an industry leading enterprise-grade WAF solution. Citrix WAF mitigates threats against your public-facing assets, including websites, apps, and APIs. From layer 3 to layer 7, Citrix WAF includes protections such as IP reputation, bot mitigation, defense against the OWASP Top 10 application threats, built-in signatures to protect against application stack vulnerabilities, and more.

Citrix WAF supports Common Event Format (CEF) which is an industry standard format on top of Syslog messages . By connecting Citrix WAF CEF logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Configure Citrix WAF to send Syslog messages in CEF format to the proxy machine using the steps below. \n\n1. Follow [this guide](https://support.citrix.com/article/CTX234174) to configure WAF.\n\n2. Follow [this guide](https://support.citrix.com/article/CTX136146) to configure CEF logs.\n\n3. 
Follow [this guide](https://docs.citrix.com/en-us/citrix-adc/13/system/audit-logging/configuring-audit-logging.html) to forward the logs to proxy . Make sure you to send the logs to port 514 TCP on the Linux machine's IP address.\n\n"", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Citrix%20Web%20App%20Firewall/Data%20Connectors/template_Citrix_WAFAMA.json","true" +"CommonSecurityLog","Claroty","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Claroty","azuresentinel","azure-sentinel-solution-claroty","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Claroty","Claroty","[Deprecated] Claroty via Legacy Agent","The [Claroty](https://claroty.com/) data connector provides the capability to ingest [Continuous Threat Detection](https://claroty.com/resources/datasheets/continuous-threat-detection) and [Secure Remote Access](https://claroty.com/industrial-cybersecurity/sra) events into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**ClarotyEvent**](https://aka.ms/sentinel-claroty-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Configure Claroty to send logs using CEF"", ""description"": ""Configure log forwarding using CEF:\n\n1. Navigate to the **Syslog** section of the Configuration menu.\n\n2. Select **+Add**.\n\n3. In the **Add New Syslog Dialog** specify Remote Server **IP**, **Port**, **Protocol** and select **Message Format** - **CEF**.\n\n4. Choose **Save** to exit the **Add Syslog dialog**.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Claroty/Data%20Connectors/Connector_Claroty_CEF.json","true" +"CommonSecurityLog","Claroty","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Claroty","azuresentinel","azure-sentinel-solution-claroty","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ClarotyAma","Claroty","[Deprecated] Claroty via AMA","The [Claroty](https://claroty.com/) data connector provides the capability to ingest [Continuous Threat Detection](https://claroty.com/resources/datasheets/continuous-threat-detection) and [Secure Remote Access](https://claroty.com/industrial-cybersecurity/sra) events into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**ClarotyEvent**](https://aka.ms/sentinel-claroty-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Configure Claroty to send logs using CEF"", ""description"": ""Configure log forwarding using CEF:\n\n1. Navigate to the **Syslog** section of the Configuration menu.\n\n2. Select **+Add**.\n\n3. In the **Add New Syslog Dialog** specify Remote Server **IP**, **Port**, **Protocol** and select **Message Format** - **CEF**.\n\n4. Choose **Save** to exit the **Add Syslog dialog**."", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Claroty/Data%20Connectors/template_ClarotyAMA.json","true" +"CommonSecurityLog","Claroty xDome","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Claroty%20xDome","claroty1709722359369","microsoft-sentinel-solution-xdome","2024-02-01","","","xDome Customer Support","Partner","https://claroty.com/support-policy","","domains,verticals","ClarotyxDome","Claroty","Claroty xDome","[Claroty](https://claroty.com/) xDome delivers comprehensive security and alert management capabilities for healthcare and industrial network environments. It is designed to map multiple source types, identify the collected data, and integrate it into Microsoft Sentinel data models. This results in the ability to monitor all potential threats in your healthcare and industrial environments in one location, leading to more effective security monitoring and a stronger security posture.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python --version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Configure the Claroty xDome - Microsoft Sentinel integration to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python --version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Claroty%20xDome/Data%20Connectors/Claroty_xDome.json","true" +"","Cloud Identity Threat Protection Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cloud%20Identity%20Threat%20Protection%20Essentials","azuresentinel","azure-sentinel-solution-cloudthreatdetection","2022-11-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","Cloud Service Threat Protection Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cloud%20Service%20Threat%20Protection%20Essentials","azuresentinel","azure-sentinel-solution-cloudservicedetection","2022-11-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"Cloudflare_CL","Cloudflare","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cloudflare","cloudflare","cloudflare_sentinel","2021-10-20","","","Cloudflare","Partner","https://support.cloudflare.com","","domains","CloudflareDataConnector","Cloudflare","[DEPRECATED] Cloudflare","The Cloudflare data connector provides the capability to ingest [Cloudflare logs](https://developers.cloudflare.com/logs/) into Microsoft Sentinel using the Cloudflare Logpush and Azure Blob Storage. Refer to [Cloudflare documentation](https://developers.cloudflare.com/logs/logpush) for more information.

NOTE: This data connector has been deprecated; consider moving to the CCF data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Azure Blob Storage API to pull logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Cloudflare**](https://aka.ms/sentinel-CloudflareDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Cloudflare Logpush**\n\nSee documentation to [setup Cloudflare Logpush to Microsoft Azure](https://developers.cloudflare.com/logs/logpush/logpush-dashboard)""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Cloudflare data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Azure Blob Storage connection string and container name, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": 
""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cloudflare data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CloudflareDataConnector-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Azure Blob Storage Container Name**, **Azure Blob Storage Connection String**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key**\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cloudflare data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CloudflareDataConnector-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. 
**Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CloudflareXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tCONTAINER_NAME\n\t\tAZURE_STORAGE_CONNECTION_STRING\n\t\tWORKSPACE_ID\n\t\tSHARED_KEY\n\t\tlogAnalyticsUri (Optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. \n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Azure Blob Storage connection string and container name"", ""description"": ""Azure Blob Storage connection string and container name where the logs are pushed to by Cloudflare Logpush. 
[See the documentation to learn more about creating Azure Blob Storage container.](https://learn.microsoft.com/azure/storage/blobs/storage-quickstart-blobs-portal)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cloudflare/Data%20Connectors/Cloudflare_API_FunctionApp.json","true" +"CloudflareV2_CL","Cloudflare","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cloudflare","cloudflare","cloudflare_sentinel","2021-10-20","","","Cloudflare","Partner","https://support.cloudflare.com","","domains","CloudflareDefinition","Microsoft","Cloudflare (Using Blob Container) (via Codeless Connector Framework)"," The Cloudflare data connector provides the capability to ingest Cloudflare logs into Microsoft Sentinel using the Cloudflare Logpush and Azure Blob Storage. Refer to [Cloudflare documentation](https://developers.cloudflare.com/logs/about/)for more information.","[{""title"": ""Connect Cloudflare Logs to Microsoft Sentinel"", ""description"": ""To enable Cloudflare logs for Microsoft Sentinel, provide the required information below and click on Connect.\n>"", ""instructions"": [{""parameters"": {""tenantId"": ""[subscription().tenantId]"", ""name"": ""principalId"", ""appId"": ""4f05ce56-95b6-4612-9d98-a45c8cc33f9f""}, ""type"": ""ServicePrincipalIDTextBox_test""}, {""parameters"": {""label"": ""The Blob container's URL you want to collect data from"", ""type"": ""text"", ""name"": ""blobContainerUri"", ""validations"": {""required"": true}}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""The Blob container's storage account resource group name"", ""type"": ""text"", ""name"": ""StorageAccountResourceGroupName"", ""validations"": {""required"": true}}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""The Blob container's storage account location"", ""type"": ""text"", ""name"": ""StorageAccountLocation"", ""validations"": {""required"": true}}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""The Blob container's 
storage account subscription id"", ""type"": ""text"", ""name"": ""StorageAccountSubscription"", ""validations"": {""required"": true}}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""The event grid topic name of the blob container's storage account if exist. else keep empty."", ""description"": ""The data flow using event grid to send 'blob-created event' notifications. There could be only one event grid topic for each storage account.\nGo to your blob container's storage account and look in the 'Events' section. If you already have a topic, please provide it's name. Else, keep the text box empty."", ""type"": ""text"", ""name"": ""EGSystemTopicName"", ""validations"": {""required"": false}}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}], ""customs"": [{""name"": ""Create a storage account and a container"", ""description"": ""Before setting up logpush in Cloudflare, first create a storage account and a container in Microsoft Azure. Use [this guide](https://learn.microsoft.com/en-us/azure/storage/blobs/storage-blobs-introduction) to know more about Container and Blob. Follow the steps in the [documentation](https://learn.microsoft.com/en-us/azure/storage/common/storage-account-create?tabs=azure-portal) to create an Azure Storage account.""}, {""name"": ""Generate a Blob SAS URL"", ""description"": ""Create and Write permissions are required. 
Refer the [documentation](https://learn.microsoft.com/en-us/azure/ai-services/translator/document-translation/how-to-guides/create-sas-tokens?tabs=Containers) to know more about Blob SAS token and url.""}, {""name"": ""Collecting logs from Cloudflare to your Blob container"", ""description"": ""Follow the steps in the [documentation](https://developers.cloudflare.com/logs/get-started/enable-destinations/azure/) for collecting logs from Cloudflare to your Blob container.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cloudflare/Data%20Connectors/CloudflareLog_CCF/CloudflareLog_ConnectorDefinition.json","true" +"CloudflareV2_CL","Cloudflare CCF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cloudflare%20CCF","cloudflare","azure-sentinel-solution-cloudflare-ccf","2025-09-30","","","Cloudflare","Partner","https://support.cloudflare.com","","domains","CloudflareDefinition","Microsoft","Cloudflare (Using Blob Container) (via Codeless Connector Framework)"," The Cloudflare data connector provides the capability to ingest Cloudflare logs into Microsoft Sentinel using the Cloudflare Logpush and Azure Blob Storage. 
Refer to [Cloudflare documentation](https://developers.cloudflare.com/logs/about/)for more information.","[{""title"": ""Connect Cloudflare Logs to Microsoft Sentinel"", ""description"": ""To enable Cloudflare logs for Microsoft Sentinel, provide the required information below and click on Connect.\n>"", ""instructions"": [{""parameters"": {""tenantId"": ""[subscription().tenantId]"", ""name"": ""principalId"", ""appId"": ""4f05ce56-95b6-4612-9d98-a45c8cc33f9f""}, ""type"": ""ServicePrincipalIDTextBox_test""}, {""parameters"": {""label"": ""The Blob container's URL you want to collect data from"", ""type"": ""text"", ""name"": ""blobContainerUri"", ""validations"": {""required"": true}}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""The Blob container's storage account resource group name"", ""type"": ""text"", ""name"": ""StorageAccountResourceGroupName"", ""validations"": {""required"": true}}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""The Blob container's storage account location"", ""type"": ""text"", ""name"": ""StorageAccountLocation"", ""validations"": {""required"": true}}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""The Blob container's storage account subscription id"", ""type"": ""text"", ""name"": ""StorageAccountSubscription"", ""validations"": {""required"": true}}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""The event grid topic name of the blob container's storage account if exist. else keep empty."", ""description"": ""The data flow using event grid to send 'blob-created event' notifications. There could be only one event grid topic for each storage account.\nGo to your blob container's storage account and look in the 'Events' section. If you already have a topic, please provide it's name. 
Else, keep the text box empty."", ""type"": ""text"", ""name"": ""EGSystemTopicName"", ""validations"": {""required"": false}}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}], ""customs"": [{""name"": ""Create a storage account and a container"", ""description"": ""Before setting up logpush in Cloudflare, first create a storage account and a container in Microsoft Azure. Use [this guide](https://learn.microsoft.com/en-us/azure/storage/blobs/storage-blobs-introduction) to know more about Container and Blob. Follow the steps in the [documentation](https://learn.microsoft.com/en-us/azure/storage/common/storage-account-create?tabs=azure-portal) to create an Azure Storage account.""}, {""name"": ""Generate a Blob SAS URL"", ""description"": ""Create and Write permissions are required. 
Refer the [documentation](https://learn.microsoft.com/en-us/azure/ai-services/translator/document-translation/how-to-guides/create-sas-tokens?tabs=Containers) to know more about Blob SAS token and url.""}, {""name"": ""Collecting logs from Cloudflare to your Blob container"", ""description"": ""Follow the steps in the [documentation](https://developers.cloudflare.com/logs/get-started/enable-destinations/azure/) for collecting logs from Cloudflare to your Blob container.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cloudflare%20CCF/Data%20Connectors/CloudflareLog_CCF/CloudflareLog_ConnectorDefinition.json","true" +"Malware_Data_CL","CofenseIntelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseIntelligence","cofense","cofense-intelligence-sentinel","2023-05-26","2024-05-26","","Cofense Support","Partner","https://cofense.com/contact-support/","","domains","CofenseIntelligence","Cofense","Cofense Intelligence Threat Indicators Ingestion","The [Cofense-Intelligence](https://cofense.com/product-services/phishing-intelligence/) data connector provides the following capabilities:
1. CofenseToSentinel :
>* Get Threat Indicators from the Cofense Intelligence platform and create Threat Intelligence Indicators in Microsoft Sentinel.
2. SentinelToDefender :
>* Get Cofense Intelligence Threat Intelligence Indicators from Microsoft Sentinel Threat Intelligence and create/update Indicators in Microsoft Defender for Endpoints.
3. CofenseIntelligenceMalware :
>* Get Malware from Cofense Intelligence and post to custom logs table.
4. DownloadThreatReports :
>* This data connector will fetch the malware data and create the Link from which we can download Threat Reports.
5. RetryFailedIndicators :
>* This data connector will fetch failed indicators from failed indicators file and retry creating/updating Threat Intelligence indicators in Microsoft Sentinel.


For more details of the REST APIs, refer to the documentation below:
1. Cofense Intelligence API documentation:
> https://www.threathq.com/docs/rest_api_reference.html
2. Microsoft Threat Intelligence Indicator documentation:
> https://learn.microsoft.com/rest/api/securityinsights/preview/threat-intelligence-indicator
3. Microsoft Defender for Endpoints Indicator documentation:
> https://learn.microsoft.com/microsoft-365/security/defender-endpoint/ti-indicator?view=o365-worldwide","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Cofense Intelligence APIs to pull its Threat Indicators and create Threat Intelligence Indicators into Microsoft Sentinel Threat Intelligence and create/update Threat Indicators in Cofense. Likewise, it also creates/updates Cofense Based Threat Indicators in Microsoft Defender for Endpoints. All this might result in additional indicator and data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Microsoft Azure Active Directory Application**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new Azure Active Directory application:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Azure Active Directory**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of CofenseIntelligence Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for Microsoft Azure Active Directory Application**\n\n Sometimes called an application password, a client secret is a string value required for the execution of CofenseIntelligence Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of CofenseIntelligence Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to Microsoft Azure Active Directory Application**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. 
Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Assign Defender Threat Indicator permissions to Microsoft Azure Active Directory Application**\n\n Follow the steps in this section to assign the permissions:\n 1. In the Azure portal, in **App registrations**, select **your application**.\n 2. To enable an app to access Defender for Endpoint indicators, assign it **'Ti.ReadWrite.All'** permission, on your application page, select **API Permissions > Add permission > APIs my organization uses >, type WindowsDefenderATP, and then select WindowsDefenderATP**.\n 3. Select **Application permissions > Ti.ReadWrite.All**, and then select **Add permissions**.\n 4. Select **Grant consent**. \n\n> **Reference link:** [https://docs.microsoft.com/microsoft-365/security/defender-endpoint/exposed-apis-create-app-webapp?view=o365-worldwide](https://docs.microsoft.com/microsoft-365/security/defender-endpoint/exposed-apis-create-app-webapp?view=o365-worldwide)""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create/get Credentials for the Cofense Intelligence account** \n\n Follow the steps in this section to create/get **Cofense Username** and **Password**:\n 1. Login to https://threathq.com and go to the **Settings menu** on the left navigation bar.\n 2. Choose the API Tokens tab and select **Add a New Token**\n 3. 
Make sure to save the **password**, as it will not be accessible again.""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Cofense Intelligence Threat Indicators data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Cofense API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cofense connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CofenseIntelligence-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tCofense BaseURL (https:///) \n\t\tCofense Username \n\t\tCofense Password \n\t\tAzure Client ID \n\t\tAzure Client Secret \n\t\tAzure Tenant ID \n\t\tAzure Resource Group Name \n\t\tAzure Workspace Name \n\t\tAzure Subscription ID \n\t\tRequireProxy \n\t\tProxy Username (optional) \n\t\tProxy Password (optional) \n\t\tProxy URL (optional) \n\t\tProxy Port (optional) \n\t\tLogLevel (optional) \n\t\tMalware_Data_Table_name\n\t\tSendCofenseIndicatorToDefender \n\t\tSchedule \n4. Click on **Review+Create**. \n5. 
Then after validation click on **Create** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cofense Intelligence Threat Indicators data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CofenseIntelligence-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CofenseXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tCofense BaseURL (https:///) \n\t\tCofense Username \n\t\tCofense Password \n\t\tAzure Client ID \n\t\tAzure Client Secret \n\t\tAzure Tenant ID \n\t\tAzure Resource Group Name \n\t\tAzure Workspace Name \n\t\tFunction App Name \n\t\tAzure Subscription ID \n\t\tRequireProxy \n\t\tProxy Username (optional) \n\t\tProxy Password (optional) \n\t\tProxy URL (optional) \n\t\tProxy Port (optional) \n\t\tLogLevel (optional) \n\t\tMalware_Data_Table_name\n\t\tSendCofenseIndicatorToDefender \n\t\tSchedule \n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Cofense Username** and **Password** is required. See the documentation to learn more about API on the [Rest API reference](https://www.threathq.com/docs/rest_api_reference.html)""}, {""name"": ""Microsoft Defender for Endpoints"", ""description"": ""**Microsoft Defender for Endpoints License** is required for SentinelToDefender function.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseIntelligence/Data%20Connectors/CofenseIntelligenceDataConnector/CofenseIntelligence_API_FunctionApp.json","true" +"ThreatIntelligenceIndicator","CofenseIntelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseIntelligence","cofense","cofense-intelligence-sentinel","2023-05-26","2024-05-26","","Cofense Support","Partner","https://cofense.com/contact-support/","","domains","CofenseIntelligence","Cofense","Cofense Intelligence Threat Indicators Ingestion","The [Cofense-Intelligence](https://cofense.com/product-services/phishing-intelligence/) data connector provides the following capabilities:
1. CofenseToSentinel :
>* Get Threat Indicators from the Cofense Intelligence platform and create Threat Intelligence Indicators in Microsoft Sentinel.
2. SentinelToDefender :
>* Get Cofense Intelligence Threat Intelligence Indicators from Microsoft Sentinel Threat Intelligence and create/update Indicators in Microsoft Defender for Endpoints.
3. CofenseIntelligenceMalware :
>* Get Malware from Cofense Intelligence and post to custom logs table.
4. DownloadThreatReports :
>* This data connector will fetch the malware data and create the Link from which we can download Threat Reports.
5. RetryFailedIndicators :
>* This data connector will fetch failed indicators from failed indicators file and retry creating/updating Threat Intelligence indicators in Microsoft Sentinel.


For more details of the REST APIs, refer to the documentation below:
1. Cofense Intelligence API documentation:
> https://www.threathq.com/docs/rest_api_reference.html
2. Microsoft Threat Intelligence Indicator documentation:
> https://learn.microsoft.com/rest/api/securityinsights/preview/threat-intelligence-indicator
3. Microsoft Defender for Endpoints Indicator documentation:
> https://learn.microsoft.com/microsoft-365/security/defender-endpoint/ti-indicator?view=o365-worldwide","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Cofense Intelligence APIs to pull its Threat Indicators and create Threat Intelligence Indicators into Microsoft Sentinel Threat Intelligence and create/update Threat Indicators in Cofense. Likewise, it also creates/updates Cofense Based Threat Indicators in Microsoft Defender for Endpoints. All this might result in additional indicator and data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Microsoft Azure Active Directory Application**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new Azure Active Directory application:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Azure Active Directory**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of CofenseIntelligence Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for Microsoft Azure Active Directory Application**\n\n Sometimes called an application password, a client secret is a string value required for the execution of CofenseIntelligence Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of CofenseIntelligence Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to Microsoft Azure Active Directory Application**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. 
Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Assign Defender Threat Indicator permissions to Microsoft Azure Active Directory Application**\n\n Follow the steps in this section to assign the permissions:\n 1. In the Azure portal, in **App registrations**, select **your application**.\n 2. To enable an app to access Defender for Endpoint indicators, assign it **'Ti.ReadWrite.All'** permission, on your application page, select **API Permissions > Add permission > APIs my organization uses >, type WindowsDefenderATP, and then select WindowsDefenderATP**.\n 3. Select **Application permissions > Ti.ReadWrite.All**, and then select **Add permissions**.\n 4. Select **Grant consent**. \n\n> **Reference link:** [https://docs.microsoft.com/microsoft-365/security/defender-endpoint/exposed-apis-create-app-webapp?view=o365-worldwide](https://docs.microsoft.com/microsoft-365/security/defender-endpoint/exposed-apis-create-app-webapp?view=o365-worldwide)""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create/get Credentials for the Cofense Intelligence account** \n\n Follow the steps in this section to create/get **Cofense Username** and **Password**:\n 1. Login to https://threathq.com and go to the **Settings menu** on the left navigation bar.\n 2. Choose the API Tokens tab and select **Add a New Token**\n 3. 
Make sure to save the **password**, as it will not be accessible again.""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Cofense Intelligence Threat Indicators data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Cofense API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cofense connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CofenseIntelligence-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tCofense BaseURL (https:///) \n\t\tCofense Username \n\t\tCofense Password \n\t\tAzure Client ID \n\t\tAzure Client Secret \n\t\tAzure Tenant ID \n\t\tAzure Resource Group Name \n\t\tAzure Workspace Name \n\t\tAzure Subscription ID \n\t\tRequireProxy \n\t\tProxy Username (optional) \n\t\tProxy Password (optional) \n\t\tProxy URL (optional) \n\t\tProxy Port (optional) \n\t\tLogLevel (optional) \n\t\tMalware_Data_Table_name\n\t\tSendCofenseIndicatorToDefender \n\t\tSchedule \n4. Click on **Review+Create**. \n5. 
Then after validation click on **Create** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cofense Intelligence Threat Indicators data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CofenseIntelligence-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CofenseXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tCofense BaseURL (https:///) \n\t\tCofense Username \n\t\tCofense Password \n\t\tAzure Client ID \n\t\tAzure Client Secret \n\t\tAzure Tenant ID \n\t\tAzure Resource Group Name \n\t\tAzure Workspace Name \n\t\tFunction App Name \n\t\tAzure Subscription ID \n\t\tRequireProxy \n\t\tProxy Username (optional) \n\t\tProxy Password (optional) \n\t\tProxy URL (optional) \n\t\tProxy Port (optional) \n\t\tLogLevel (optional) \n\t\tMalware_Data_Table_name\n\t\tSendCofenseIndicatorToDefender \n\t\tSchedule \n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Cofense Username** and **Password** is required. See the documentation to learn more about API on the [Rest API reference](https://www.threathq.com/docs/rest_api_reference.html)""}, {""name"": ""Microsoft Defender for Endpoints"", ""description"": ""**Microsoft Defender for Endpoints License** is required for SentinelToDefender function.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseIntelligence/Data%20Connectors/CofenseIntelligenceDataConnector/CofenseIntelligence_API_FunctionApp.json","true" +"Cofense_Triage_failed_indicators_CL","CofenseTriage","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseTriage","cofense","cofense-triage-sentinel","2023-03-24","2023-03-24","","Cofense Support","Partner","https://cofense.com/contact-support/","","domains","CofenseTriage","Cofense","Cofense Triage Threat Indicators Ingestion","The [Cofense-Triage](https://cofense.com/product-services/cofense-triage/) data connector provides the following capabilities:
1. CofenseBasedIndicatorCreator :
>* Get Threat Indicators from the Cofense Triage platform and create Threat Intelligence Indicators in Microsoft Sentinel.
> * Ingest Cofense Indicator ID and report links into custom logs table.
2. NonCofenseBasedIndicatorCreatorToCofense :
>* Get Non-Cofense Threat Intelligence Indicators from Microsoft Sentinel Threat Intelligence and create/update Indicators in Cofense Triage platform.
3. IndicatorCreatorToDefender :
>* Get Cofense Triage Threat Intelligence Indicators from Microsoft Sentinel Threat Intelligence and create/update Indicators in Microsoft Defender for Endpoints.
4. RetryFailedIndicators :
>* Get failed indicators from failed indicators files and retry creating/updating Threat Intelligence indicators in Microsoft Sentinel.


For more details of the REST APIs, refer to the documentation below:
1. Cofense API documentation:
> https://`<your Cofense Triage server>`/docs/api/v2/index.html
2. Microsoft Threat Intelligence Indicator documentation:
> https://learn.microsoft.com/rest/api/securityinsights/preview/threat-intelligence-indicator
3. Microsoft Defender for Endpoints Indicator documentation:
> https://learn.microsoft.com/microsoft-365/security/defender-endpoint/ti-indicator?view=o365-worldwide","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Cofense APIs to pull its Threat Indicators and create Threat Intelligence Indicators into Microsoft Sentinel Threat Intelligence and pulls Non-Cofense Threat Intelligence Indicators from Microsoft Sentinel and create/update Threat Indicators in Cofense. Likewise, it also creates/updates Cofense Based Threat Indicators in Microsoft Defender for Endpoints. All this might result in additional indicator and data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Microsoft Azure Active Directory Application**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new Azure Active Directory application:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Azure Active Directory**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. 
The client ID and Tenant ID is required as configuration parameters for the execution of CofenseTriage Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for Microsoft Azure Active Directory Application**\n\n Sometimes called an application password, a client secret is a string value required for the execution of CofenseTriage Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of CofenseTriage Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to Microsoft Azure Active Directory Application**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. 
Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Assign Defender Threat Indicator permissions to Microsoft Azure Active Directory Application**\n\n Follow the steps in this section to assign the permissions:\n 1. In the Azure portal, in **App registrations**, select **your application**.\n 2. To enable an app to access Defender for Endpoint indicators, assign it **'Ti.ReadWrite.All'** permission, on your application page, select **API Permissions > Add permission > APIs my organization uses >, type WindowsDefenderATP, and then select WindowsDefenderATP**.\n 3. Select **Application permissions > Ti.ReadWrite.All**, and then select **Add permissions**.\n 4. Select **Grant consent**. \n\n> **Reference link:** [https://docs.microsoft.com/microsoft-365/security/defender-endpoint/exposed-apis-create-app-webapp?view=o365-worldwide](https://docs.microsoft.com/microsoft-365/security/defender-endpoint/exposed-apis-create-app-webapp?view=o365-worldwide)""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create/get Credentials for the Cofense Triage account** \n\n Follow the steps in this section to create/get **Cofense Client ID** and **Client Secret**:\n 1. Go to **Administration > API Management > Version 2 tab > Applications**\n 2. Click on **New Application**\n 3. 
Add the required information and click on **submit**.""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Cofense Triage Threat Indicators data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Cofense API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cofense connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CofenseTriage-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tCofense URL (https:///) \n\t\tCofense Client ID \n\t\tCofense Client Secret \n\t\tAzure Client ID \n\t\tAzure Client Secret \n\t\tAzure Tenant ID \n\t\tAzure Resource Group Name \n\t\tAzure Workspace Name \n\t\tAzure Subscription ID \n\t\tThreat Level \n\t\tProxy Username (optional) \n\t\tProxy Password (optional) \n\t\tProxy URL (optional) \n\t\tProxy Port (optional) \n\t\tThrottle Limit for Non-Cofense Indicators (optional) \n\t\tLogLevel (optional) \n\t\tReports Table Name \n\t\tSchedule \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cofense Triage Threat Indicators data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CofenseThreatIndicatorsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CofenseXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tCofense URL (https:///) \n\t\tCofense Client ID \n\t\tCofense Client Secret \n\t\tAzure Client ID \n\t\tAzure Client Secret \n\t\tAzure Tenant ID \n\t\tAzure Resource Group Name \n\t\tAzure Workspace Name \n\t\tAzure Subscription ID \n\t\tThreat Level \n\t\tProxy Username (optional) \n\t\tProxy Password (optional) \n\t\tProxy URL (optional) \n\t\tProxy Port (optional) \n\t\tThrottle Limit for Non-Cofense Indicators (optional) \n\t\tLogLevel (optional) \n\t\tReports Table Name \n\t\tSchedule \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Cofense Client ID** and **Client Secret** is required. See the documentation to learn more about API on the `https:///docs/api/v2/index.html`""}, {""name"": ""Microsoft Defender for Endpoints"", ""description"": ""**Microsoft Defender for Endpoints License** is required for IndicatorCreatorToDefender function.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseTriage/Data%20Connectors/CofenseTriageDataConnector/CofenseTriage_API_FunctionApp.json","true" +"Report_links_data_CL","CofenseTriage","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseTriage","cofense","cofense-triage-sentinel","2023-03-24","2023-03-24","","Cofense Support","Partner","https://cofense.com/contact-support/","","domains","CofenseTriage","Cofense","Cofense Triage Threat Indicators Ingestion","The [Cofense-Triage](https://cofense.com/product-services/cofense-triage/) data connector provides the following capabilities:
1. CofenseBasedIndicatorCreator :
>* Get Threat Indicators from the Cofense Triage platform and create Threat Intelligence Indicators in Microsoft Sentinel.
> * Ingest Cofense Indicator ID and report links into custom logs table.
2. NonCofenseBasedIndicatorCreatorToCofense :
>* Get Non-Cofense Threat Intelligence Indicators from Microsoft Sentinel Threat Intelligence and create/update Indicators in Cofense Triage platform.
3. IndicatorCreatorToDefender :
>* Get Cofense Triage Threat Intelligence Indicators from Microsoft Sentinel Threat Intelligence and create/update Indicators in Microsoft Defender for Endpoints.
4. RetryFailedIndicators :
>* Get failed indicators from failed indicators files and retry creating/updating Threat Intelligence indicators in Microsoft Sentinel.


For more details of REST APIs refer to the below two documentations:
1. Cofense API documentation:
> https://``/docs/api/v2/index.html
2. Microsoft Threat Intelligence Indicator documentation:
> https://learn.microsoft.com/rest/api/securityinsights/preview/threat-intelligence-indicator
3. Microsoft Defender for Endpoints Indicator documentation:
> https://learn.microsoft.com/microsoft-365/security/defender-endpoint/ti-indicator?view=o365-worldwide","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Cofense APIs to pull its Threat Indicators and create Threat Intelligence Indicators into Microsoft Sentinel Threat Intelligence and pulls Non-Cofense Threat Intelligence Indicators from Microsoft Sentinel and create/update Threat Indicators in Cofense. Likewise, it also creates/updates Cofense Based Threat Indicators in Microsoft Defender for Endpoints. All this might result in additional indicator and data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Microsoft Azure Active Directory Application**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new Azure Active Directory application:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Azure Active Directory**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. 
The client ID and Tenant ID is required as configuration parameters for the execution of CofenseTriage Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for Microsoft Azure Active Directory Application**\n\n Sometimes called an application password, a client secret is a string value required for the execution of CofenseTriage Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of CofenseTriage Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to Microsoft Azure Active Directory Application**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. 
Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Assign Defender Threat Indicator permissions to Microsoft Azure Active Directory Application**\n\n Follow the steps in this section to assign the permissions:\n 1. In the Azure portal, in **App registrations**, select **your application**.\n 2. To enable an app to access Defender for Endpoint indicators, assign it **'Ti.ReadWrite.All'** permission, on your application page, select **API Permissions > Add permission > APIs my organization uses >, type WindowsDefenderATP, and then select WindowsDefenderATP**.\n 3. Select **Application permissions > Ti.ReadWrite.All**, and then select **Add permissions**.\n 4. Select **Grant consent**. \n\n> **Reference link:** [https://docs.microsoft.com/microsoft-365/security/defender-endpoint/exposed-apis-create-app-webapp?view=o365-worldwide](https://docs.microsoft.com/microsoft-365/security/defender-endpoint/exposed-apis-create-app-webapp?view=o365-worldwide)""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create/get Credentials for the Cofense Triage account** \n\n Follow the steps in this section to create/get **Cofense Client ID** and **Client Secret**:\n 1. Go to **Administration > API Management > Version 2 tab > Applications**\n 2. Click on **New Application**\n 3. 
Add the required information and click on **submit**.""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Cofense Triage Threat Indicators data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Cofense API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cofense connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CofenseTriage-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tCofense URL (https:///) \n\t\tCofense Client ID \n\t\tCofense Client Secret \n\t\tAzure Client ID \n\t\tAzure Client Secret \n\t\tAzure Tenant ID \n\t\tAzure Resource Group Name \n\t\tAzure Workspace Name \n\t\tAzure Subscription ID \n\t\tThreat Level \n\t\tProxy Username (optional) \n\t\tProxy Password (optional) \n\t\tProxy URL (optional) \n\t\tProxy Port (optional) \n\t\tThrottle Limit for Non-Cofense Indicators (optional) \n\t\tLogLevel (optional) \n\t\tReports Table Name \n\t\tSchedule \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cofense Triage Threat Indicators data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CofenseThreatIndicatorsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CofenseXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tCofense URL (https:///) \n\t\tCofense Client ID \n\t\tCofense Client Secret \n\t\tAzure Client ID \n\t\tAzure Client Secret \n\t\tAzure Tenant ID \n\t\tAzure Resource Group Name \n\t\tAzure Workspace Name \n\t\tAzure Subscription ID \n\t\tThreat Level \n\t\tProxy Username (optional) \n\t\tProxy Password (optional) \n\t\tProxy URL (optional) \n\t\tProxy Port (optional) \n\t\tThrottle Limit for Non-Cofense Indicators (optional) \n\t\tLogLevel (optional) \n\t\tReports Table Name \n\t\tSchedule \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Cofense Client ID** and **Client Secret** is required. See the documentation to learn more about API on the `https:///docs/api/v2/index.html`""}, {""name"": ""Microsoft Defender for Endpoints"", ""description"": ""**Microsoft Defender for Endpoints License** is required for IndicatorCreatorToDefender function.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseTriage/Data%20Connectors/CofenseTriageDataConnector/CofenseTriage_API_FunctionApp.json","true" +"ThreatIntelligenceIndicator","CofenseTriage","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseTriage","cofense","cofense-triage-sentinel","2023-03-24","2023-03-24","","Cofense Support","Partner","https://cofense.com/contact-support/","","domains","CofenseTriage","Cofense","Cofense Triage Threat Indicators Ingestion","The [Cofense-Triage](https://cofense.com/product-services/cofense-triage/) data connector provides the following capabilities:
1. CofenseBasedIndicatorCreator :
>* Get Threat Indicators from the Cofense Triage platform and create Threat Intelligence Indicators in Microsoft Sentinel.
> * Ingest Cofense Indicator ID and report links into custom logs table.
2. NonCofenseBasedIndicatorCreatorToCofense :
>* Get Non-Cofense Threat Intelligence Indicators from Microsoft Sentinel Threat Intelligence and create/update Indicators in Cofense Triage platform.
3. IndicatorCreatorToDefender :
>* Get Cofense Triage Threat Intelligence Indicators from Microsoft Sentinel Threat Intelligence and create/update Indicators in Microsoft Defender for Endpoints.
4. RetryFailedIndicators :
>* Get failed indicators from failed indicators files and retry creating/updating Threat Intelligence indicators in Microsoft Sentinel.


For more details of REST APIs refer to the below two documentations:
1. Cofense API documentation:
> https://``/docs/api/v2/index.html
2. Microsoft Threat Intelligence Indicator documentation:
> https://learn.microsoft.com/rest/api/securityinsights/preview/threat-intelligence-indicator
3. Microsoft Defender for Endpoints Indicator documentation:
> https://learn.microsoft.com/microsoft-365/security/defender-endpoint/ti-indicator?view=o365-worldwide","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Cofense APIs to pull its Threat Indicators and create Threat Intelligence Indicators into Microsoft Sentinel Threat Intelligence and pulls Non-Cofense Threat Intelligence Indicators from Microsoft Sentinel and create/update Threat Indicators in Cofense. Likewise, it also creates/updates Cofense Based Threat Indicators in Microsoft Defender for Endpoints. All this might result in additional indicator and data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Microsoft Azure Active Directory Application**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new Azure Active Directory application:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Azure Active Directory**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. 
The client ID and Tenant ID is required as configuration parameters for the execution of CofenseTriage Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for Microsoft Azure Active Directory Application**\n\n Sometimes called an application password, a client secret is a string value required for the execution of CofenseTriage Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of CofenseTriage Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to Microsoft Azure Active Directory Application**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. 
Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Assign Defender Threat Indicator permissions to Microsoft Azure Active Directory Application**\n\n Follow the steps in this section to assign the permissions:\n 1. In the Azure portal, in **App registrations**, select **your application**.\n 2. To enable an app to access Defender for Endpoint indicators, assign it **'Ti.ReadWrite.All'** permission, on your application page, select **API Permissions > Add permission > APIs my organization uses >, type WindowsDefenderATP, and then select WindowsDefenderATP**.\n 3. Select **Application permissions > Ti.ReadWrite.All**, and then select **Add permissions**.\n 4. Select **Grant consent**. \n\n> **Reference link:** [https://docs.microsoft.com/microsoft-365/security/defender-endpoint/exposed-apis-create-app-webapp?view=o365-worldwide](https://docs.microsoft.com/microsoft-365/security/defender-endpoint/exposed-apis-create-app-webapp?view=o365-worldwide)""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create/get Credentials for the Cofense Triage account** \n\n Follow the steps in this section to create/get **Cofense Client ID** and **Client Secret**:\n 1. Go to **Administration > API Management > Version 2 tab > Applications**\n 2. Click on **New Application**\n 3. 
Add the required information and click on **submit**.""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Cofense Triage Threat Indicators data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Cofense API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cofense connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CofenseTriage-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tCofense URL (https:///) \n\t\tCofense Client ID \n\t\tCofense Client Secret \n\t\tAzure Client ID \n\t\tAzure Client Secret \n\t\tAzure Tenant ID \n\t\tAzure Resource Group Name \n\t\tAzure Workspace Name \n\t\tAzure Subscription ID \n\t\tThreat Level \n\t\tProxy Username (optional) \n\t\tProxy Password (optional) \n\t\tProxy URL (optional) \n\t\tProxy Port (optional) \n\t\tThrottle Limit for Non-Cofense Indicators (optional) \n\t\tLogLevel (optional) \n\t\tReports Table Name \n\t\tSchedule \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cofense Triage Threat Indicators data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CofenseThreatIndicatorsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CofenseXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tCofense URL (https:///) \n\t\tCofense Client ID \n\t\tCofense Client Secret \n\t\tAzure Client ID \n\t\tAzure Client Secret \n\t\tAzure Tenant ID \n\t\tAzure Resource Group Name \n\t\tAzure Workspace Name \n\t\tAzure Subscription ID \n\t\tThreat Level \n\t\tProxy Username (optional) \n\t\tProxy Password (optional) \n\t\tProxy URL (optional) \n\t\tProxy Port (optional) \n\t\tThrottle Limit for Non-Cofense Indicators (optional) \n\t\tLogLevel (optional) \n\t\tReports Table Name \n\t\tSchedule \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Cofense Client ID** and **Client Secret** is required. See the documentation to learn more about API on the `https:///docs/api/v2/index.html`""}, {""name"": ""Microsoft Defender for Endpoints"", ""description"": ""**Microsoft Defender for Endpoints License** is required for IndicatorCreatorToDefender function.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CofenseTriage/Data%20Connectors/CofenseTriageDataConnector/CofenseTriage_API_FunctionApp.json","true" +"CognniIncidents_CL","Cognni","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cognni","shieldox","cognni_for_microsoft_sentinel","2022-05-06","","","Cognni","Partner","https://cognni.ai/contact-support/","","domains","CognniSentinelDataConnector","Cognni","Cognni","The Cognni connector offers a quick and simple integration with Microsoft Sentinel. You can use Cognni to autonomously map your previously unclassified important information and detect related incidents. 
This allows you to recognize risks to your important information, understand the severity of the incidents, and investigate the details you need to remediate, fast enough to make a difference.","[{""title"": ""Connect to Cognni"", ""description"": ""1. Go to [Cognni integrations page](https://intelligence.cognni.ai/integrations)\n2. Click **'Connect'** on the 'Microsoft Sentinel' box\n3. Copy and paste **'workspaceId'** and **'sharedKey'** (from below) to the related fields on Cognni's integrations screen\n4. Click the **'Connect'** button to complete the configuration. \n Soon, all your Cognni-detected incidents will be forwarded here (into Microsoft Sentinel)\n\nNot a Cognni user? [Join us](https://azuremarketplace.microsoft.com/en-us/marketplace/apps/shieldox.appsource_freetrial)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Shared Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cognni/Data%20Connectors/CognniSentinelConnector.json","true" +"ThreatIntelligenceIndicator","CognyteLuminar","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CognyteLuminar","cognytetechnologiesisraelltd","microsoft-sentinel-solution-cognyte-luminar","2023-09-15","","","Cognyte Luminar","Partner","https://www.cognyte.com/contact/","","domains","CognyteLuminar","Cognyte Technologies Israel Ltd","Luminar IOCs and Leaked Credentials","Luminar IOCs and Leaked Credentials connector allows integration of intelligence-based IOC data and customer-related leaked records identified by Luminar.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Cognyte Luminar API to pull Luminar IOCs and Leaked Credentials into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template for Flex Consumption Plan"", ""description"": ""Use this method for automated deployment of the data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CognyteLuminar-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Application ID**, **Tenant ID**,**Client Secret**, **Luminar API Client ID**, **Luminar API Account ID**, **Luminar API Client Secret**, **Luminar Initial Fetch Date**, **TimeInterval** and deploy.\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template for Premium Plan"", ""description"": ""Use this method for automated deployment of the data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CognyteLuminar-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Application ID**, **Tenant ID**,**Client Secret**, **Luminar API Client ID**, **Luminar API Account ID**, **Luminar API Client Secret**, **Luminar Initial Fetch Date**, **TimeInterval** and deploy.\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cognyte Luminar data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> NOTE:You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CognyteLuminar-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CognyteLuminarXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\\n\\n1. In the Function App, select the Function App Name and select **Configuration**.\\n2. In the **Application settings** tab, select **+ New application setting**.\\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \\n\\tApplication ID\\n\\tTenant ID\\n\\tClient Secret\\n\\tLuminar API Client ID\\n\\tLuminar API Account ID\\n\\tLuminar API Client Secret\\n\\tLuminar Initial Fetch Date\\n\\tTimeInterval - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`\\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Luminar Client ID**, **Luminar Client Secret** and **Luminar Account ID** are required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CognyteLuminar/Data%20Connectors/CognyteLuminar_FunctionApp.json","true" +"Cohesity_CL","CohesitySecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CohesitySecurity","cohesitydev1592001764720","cohesity_sentinel_data_connector","2022-10-10","","","Cohesity","Partner","https://support.cohesity.com/","","domains","CohesityDataConnector","Cohesity","Cohesity","The Cohesity function apps provide the ability to ingest Cohesity Datahawk ransomware alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions that connect to the Azure Blob Storage and KeyVault. This might result in additional costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/), [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) and [Azure KeyVault pricing page](https://azure.microsoft.com/pricing/details/key-vault/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Get a Cohesity DataHawk API key (see troubleshooting [instruction 1](https://github.com/Azure/Azure-Sentinel/tree/master/Solutions/CohesitySecurity/Data%20Connectors/Helios2Sentinel/IncidentProducer))**""}, {""title"": """", ""description"": ""**STEP 2 - Register Azure app ([link](https://portal.azure.com/#view/Microsoft_AAD_IAM/ActiveDirectoryMenuBlade/~/RegisteredApps)) and save Application (client) ID, Directory (tenant) ID, and Secret Value ([instructions](https://learn.microsoft.com/en-us/azure/healthcare-apis/register-application)). Grant it Azure Storage (user_impersonation) permission. Also, assign the 'Microsoft Sentinel Contributor' role to the application in the appropriate subscription.**""}, {""title"": """", ""description"": ""**STEP 3 - Deploy the connector and the associated Azure Functions**.""}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cohesity data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-Cohesity-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the parameters that you created at the previous steps\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Azure Blob Storage connection string and container name"", ""description"": ""Azure Blob Storage connection string and container name""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CohesitySecurity/Data%20Connectors/Helios2Sentinel/Cohesity_API_FunctionApp.json","true" +"","Common Event Format","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Common%20Event%20Format","azuresentinel","azure-sentinel-solution-commoneventformat","2022-05-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"CommvaultSecurityIQ_CL","Commvault Security IQ","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Commvault%20Security%20IQ","commvault","microsoft-sentinel-solution-commvaultsecurityiq","2023-08-17","","","Commvault","Partner","https://www.commvault.com/support","","domains","CommvaultSecurityIQ_CL","Commvault","CommvaultSecurityIQ","This Azure Function enables Commvault users to ingest alerts/events into their Microsoft Sentinel instance. With Analytic Rules,Microsoft Sentinel can automatically create Microsoft Sentinel incidents from incoming events and logs.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Commvault Instance to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Commvault QSDK Token**\n\n[Follow these instructions](https://documentation.commvault.com/2024e/essential/creating_access_token.html) to create an API Token.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the CommvaultSecurityIQ data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Commvault Endpoint URL and QSDK Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""**Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the Commvault Security IQ data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CommvaultSecurityIQ-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the **Workspace ID**, **Workspace Key** 'and/or Other required fields' and click Next. \n4. 
Click **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Commvault Environment Endpoint URL"", ""description"": ""Make sure to follow the documentation and set the secret value in KeyVault""}, {""name"": ""Commvault QSDK Token"", ""description"": ""Make sure to follow the documentation and set the secret value in KeyVault""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Commvault%20Security%20IQ/Data%20Connectors/CommvaultSecurityIQ_API_AzureFunctionApp.json","true" +"","ContinuousDiagnostics&Mitigation","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ContinuousDiagnostics%26Mitigation","azuresentinel","azure-sentinel-solution-continuousdiagnostics","2022-08-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"CommonSecurityLog","Contrast 
Protect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Contrast%20Protect","contrast_security","contrast_protect_azure_sentinel_solution","2021-10-20","","","Contrast Protect","Partner","https://docs.contrastsecurity.com/","","domains","ContrastProtect","Contrast Security","[Deprecated] Contrast Protect via Legacy Agent","Contrast Protect mitigates security threats in production applications with runtime protection and observability. Attack event results (blocked, probed, suspicious...) and other information can be sent to Microsoft Sentinel to blend with security information from other systems.","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. 
You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Configure the Contrast Protect agent to forward events to syslog as described here: https://docs.contrastsecurity.com/en/output-to-syslog.html. Generate some attack events for your application.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Contrast%20Protect/Data%20Connectors/ContrastProtect.json","true" +"CommonSecurityLog","Contrast Protect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Contrast%20Protect","contrast_security","contrast_protect_azure_sentinel_solution","2021-10-20","","","Contrast Protect","Partner","https://docs.contrastsecurity.com/","","domains","ContrastProtectAma","Contrast Security","[Deprecated] Contrast Protect via AMA","Contrast Protect mitigates security threats in production applications with runtime protection and observability. Attack event results (blocked, probed, suspicious...) 
and other information can be sent to Microsoft Sentinel to blend with security information from other systems.","[{""title"": """", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Configure the Contrast Protect agent to forward events to syslog as described here: https://docs.contrastsecurity.com/en/output-to-syslog.html. Generate some attack events for your application."", ""instructions"": []}, {""title"": ""Step C. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Contrast%20Protect/Data%20Connectors/template_ContrastProtectAMA.json","true" +"ContrastADRIncident_CL","ContrastADR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ContrastADR","contrast_security","contrast_adr_azure_sentinel_solution","2025-01-18","2025-01-18","","Contrast Security","Partner","https://support.contrastsecurity.com/hc/en-us","","domains","ContrastADR","Contrast Security","ContrastADR","The ContrastADR data connector provides the capability to ingest Contrast ADR attack events into Microsoft Sentinel using the ContrastADR Webhook. ContrastADR data connector can enrich the incoming webhook data with ContrastADR API enrichment calls.","[{""title"": """", ""description"": ""Use these Workspace id and primary key as shared key in azure function app"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method to automate deployment of the ContrastADR Data Connector using ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ContrastADR-azuredeploy)\n2. 
Provide the following parameters: Region, Function Name, LOG_ANALYTICS_SHARED_KEY, LOG_ANALYTICS_WORKSPACE_ID ""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ContrastADR/Data%20Connectors/ContrastADR_API_FunctionApp.json","true" +"ContrastADR_CL","ContrastADR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ContrastADR","contrast_security","contrast_adr_azure_sentinel_solution","2025-01-18","2025-01-18","","Contrast Security","Partner","https://support.contrastsecurity.com/hc/en-us","","domains","ContrastADR","Contrast Security","ContrastADR","The ContrastADR data connector provides the capability to ingest Contrast ADR attack events into Microsoft Sentinel using the ContrastADR Webhook. 
ContrastADR data connector can enrich the incoming webhook data with ContrastADR API enrichment calls.","[{""title"": """", ""description"": ""Use these Workspace id and primary key as shared key in azure function app"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method to automate deployment of the ContrastADR Data Connector using ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ContrastADR-azuredeploy)\n2. Provide the following parameters: Region, Function Name, LOG_ANALYTICS_SHARED_KEY, LOG_ANALYTICS_WORKSPACE_ID ""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ContrastADR/Data%20Connectors/ContrastADR_API_FunctionApp.json","true" +"Corelight_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. 
Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_bacnet_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_capture_loss_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_cip_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_conn_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_conn_long_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_conn_red_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_corelight_burst_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_corelight_overall_capture_loss_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_corelight_profiling_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_datared_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_dce_rpc_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_dga_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_dhcp_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_dnp3_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_dns_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_dns_red_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_dpd_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_encrypted_dns_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_enip_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_enip_debug_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_enip_list_identity_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_etc_viz_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_files_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_files_red_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_ftp_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_generic_dns_tunnels_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_generic_icmp_tunnels_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_http2_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_http_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_http_red_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_icmp_specific_tunnels_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_intel_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_ipsec_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_irc_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_iso_cotp_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_kerberos_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_known_certs_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_known_devices_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_known_domains_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_known_hosts_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_known_names_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_known_remotes_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_known_services_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_known_users_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_local_subnets_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_local_subnets_dj_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_local_subnets_graphs_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_log4shell_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_modbus_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_mqtt_connect_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_mqtt_publish_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_mqtt_subscribe_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_mysql_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_notice_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_ntlm_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_ntp_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_ocsp_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_openflow_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_packet_filter_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_pe_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_profinet_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_profinet_dce_rpc_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_profinet_debug_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_radius_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_rdp_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_reporter_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_rfb_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_s7comm_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_signatures_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_sip_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_smartpcap_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_smartpcap_stats_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_smb_files_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_smb_mapping_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_smtp_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_smtp_links_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_snmp_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_socks_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_software_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_specific_dns_tunnels_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_ssh_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_ssl_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_ssl_red_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_stats_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_stepping_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_stun_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_stun_nat_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_suricata_corelight_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_suricata_eve_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_suricata_stats_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_suricata_zeek_stats_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_syslog_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_tds_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_tds_rpc_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_tds_sql_batch_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_traceroute_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_tunnel_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_unknown_smartpcap_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_util_stats_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_vpn_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_weird_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_weird_red_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_weird_stats_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_wireguard_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_x509_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_x509_red_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"Corelight_v2_zeek_doctor_CL","Corelight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight","corelightinc1584998267292","corelight-for-azure-sentinel","2022-06-01","","","Corelight","Partner","https://support.corelight.com/","","domains","CorelightConnectorExporter","Corelight","Corelight Connector Exporter","The [Corelight](https://corelight.com/) data connector enables incident responders and threat hunters who use Microsoft Sentinel to work faster and more effectively. 
The data connector enables ingestion of events from [Zeek](https://zeek.org/) and [Suricata](https://suricata-ids.org/) via Corelight Sensors into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Corelight**](https://aka.ms/sentinel-Corelight-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Get the files"", ""description"": ""Contact your TAM, SE, or info@corelight.com to get the files needed for the Microsoft Sentinel integration.""}, {""title"": ""2. Replay sample data."", ""description"": ""Replay sample data to create the needed tables in your Log Analytics workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Send sample data (only needed once per Log Analytics workspace)"", ""value"": ""./send_samples.py --workspace-id {0} --workspace-key {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Install custom exporter."", ""description"": ""Install the custom exporter or the logstash container.""}, {""title"": ""4. Configure the Corelight Sensor to send logs to the Azure Log Analytics Agent."", ""description"": ""Using the following values, configure your Corelight Sensor to use the Microsoft Sentinel exporter. 
Alternatively, you can configure the logstash container with these values and configure your sensor to send JSON over TCP to that container on the appropriate port."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Workspace Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Corelight/Data%20Connectors/CorelightConnectorExporter.json","true" +"PaloAltoCortexXDR_Alerts_CL","Cortex XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cortex%20XDR","defendlimited1682894612656","cortex_xdr_connector","2023-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CortexXDRDataConnector","Microsoft","Palo Alto Cortex XDR","The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows ingesting logs from the Palo Alto Cortex XDR API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Palo Alto Cortex XDR API \n Follow the instructions to obtain the credentials. you can also follow this [guide](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/3u3j0e7hcx8t1-get-started-with-cortex-xdr-ap-is) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\n 1.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 1.3. Under [**Integrations**] click on [**API Keys**].\n 1.4. In the [**Settings**] Page click on [**Copy API URL**] in the top right corner.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 2.3. Under [**Integrations**] click on [**API Keys**].\n 2.4. In the [**Settings**] Page click on [**New Key**] in the top right corner.\n 2.5. Choose security level, role, choose Standard and click on [**Generate**]\n 2.6. 
Copy the API Token, once it generated the [**API Token ID**] can be found under the ID column""}}, {""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api-example.xdr.au.paloaltonetworks.com"", ""type"": ""text"", ""name"": ""apiUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Key ID"", ""placeholder"": ""API ID"", ""type"": ""text"", ""name"": ""apiId""}, ""type"": ""Textbox""}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""apiToken""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cortex%20XDR/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true" +"PaloAltoCortexXDR_Audit_Agent_CL","Cortex XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cortex%20XDR","defendlimited1682894612656","cortex_xdr_connector","2023-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CortexXDRDataConnector","Microsoft","Palo Alto Cortex XDR","The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows ingesting logs from the Palo Alto Cortex XDR API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Palo Alto Cortex XDR API \n Follow the instructions to obtain the credentials. you can also follow this [guide](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/3u3j0e7hcx8t1-get-started-with-cortex-xdr-ap-is) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\n 1.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 1.3. Under [**Integrations**] click on [**API Keys**].\n 1.4. In the [**Settings**] Page click on [**Copy API URL**] in the top right corner.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 2.3. Under [**Integrations**] click on [**API Keys**].\n 2.4. In the [**Settings**] Page click on [**New Key**] in the top right corner.\n 2.5. Choose security level, role, choose Standard and click on [**Generate**]\n 2.6. 
Copy the API Token, once it generated the [**API Token ID**] can be found under the ID column""}}, {""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api-example.xdr.au.paloaltonetworks.com"", ""type"": ""text"", ""name"": ""apiUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Key ID"", ""placeholder"": ""API ID"", ""type"": ""text"", ""name"": ""apiId""}, ""type"": ""Textbox""}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""apiToken""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cortex%20XDR/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true" +"PaloAltoCortexXDR_Audit_Management_CL","Cortex XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cortex%20XDR","defendlimited1682894612656","cortex_xdr_connector","2023-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CortexXDRDataConnector","Microsoft","Palo Alto Cortex XDR","The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows ingesting logs from the Palo Alto Cortex XDR API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Palo Alto Cortex XDR API \n Follow the instructions to obtain the credentials. you can also follow this [guide](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/3u3j0e7hcx8t1-get-started-with-cortex-xdr-ap-is) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\n 1.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 1.3. Under [**Integrations**] click on [**API Keys**].\n 1.4. In the [**Settings**] Page click on [**Copy API URL**] in the top right corner.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 2.3. Under [**Integrations**] click on [**API Keys**].\n 2.4. In the [**Settings**] Page click on [**New Key**] in the top right corner.\n 2.5. Choose security level, role, choose Standard and click on [**Generate**]\n 2.6. 
Copy the API Token, once it generated the [**API Token ID**] can be found under the ID column""}}, {""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api-example.xdr.au.paloaltonetworks.com"", ""type"": ""text"", ""name"": ""apiUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Key ID"", ""placeholder"": ""API ID"", ""type"": ""text"", ""name"": ""apiId""}, ""type"": ""Textbox""}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""apiToken""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cortex%20XDR/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true" +"PaloAltoCortexXDR_Endpoints_CL","Cortex XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cortex%20XDR","defendlimited1682894612656","cortex_xdr_connector","2023-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CortexXDRDataConnector","Microsoft","Palo Alto Cortex XDR","The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows ingesting logs from the Palo Alto Cortex XDR API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Palo Alto Cortex XDR API \n Follow the instructions to obtain the credentials. you can also follow this [guide](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/3u3j0e7hcx8t1-get-started-with-cortex-xdr-ap-is) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\n 1.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 1.3. Under [**Integrations**] click on [**API Keys**].\n 1.4. In the [**Settings**] Page click on [**Copy API URL**] in the top right corner.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 2.3. Under [**Integrations**] click on [**API Keys**].\n 2.4. In the [**Settings**] Page click on [**New Key**] in the top right corner.\n 2.5. Choose security level, role, choose Standard and click on [**Generate**]\n 2.6. 
Copy the API Token, once it generated the [**API Token ID**] can be found under the ID column""}}, {""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api-example.xdr.au.paloaltonetworks.com"", ""type"": ""text"", ""name"": ""apiUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Key ID"", ""placeholder"": ""API ID"", ""type"": ""text"", ""name"": ""apiId""}, ""type"": ""Textbox""}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""apiToken""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cortex%20XDR/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true" +"PaloAltoCortexXDR_Incidents_CL","Cortex XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cortex%20XDR","defendlimited1682894612656","cortex_xdr_connector","2023-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CortexXDRDataConnector","Microsoft","Palo Alto Cortex XDR","The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows ingesting logs from the Palo Alto Cortex XDR API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Palo Alto Cortex XDR API \n Follow the instructions to obtain the credentials. you can also follow this [guide](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/3u3j0e7hcx8t1-get-started-with-cortex-xdr-ap-is) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\n 1.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 1.3. Under [**Integrations**] click on [**API Keys**].\n 1.4. In the [**Settings**] Page click on [**Copy API URL**] in the top right corner.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 2.3. Under [**Integrations**] click on [**API Keys**].\n 2.4. In the [**Settings**] Page click on [**New Key**] in the top right corner.\n 2.5. Choose security level, role, choose Standard and click on [**Generate**]\n 2.6. 
Copy the API Token, once it generated the [**API Token ID**] can be found under the ID column""}}, {""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api-example.xdr.au.paloaltonetworks.com"", ""type"": ""text"", ""name"": ""apiUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Key ID"", ""placeholder"": ""API ID"", ""type"": ""text"", ""name"": ""apiId""}, ""type"": ""Textbox""}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""apiToken""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cortex%20XDR/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true" +"CriblAccess_CL","Cribl","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cribl","criblinc1673975616879","microsoft-sentinel-solution-cribl","2024-08-01","2024-09-05","","Cribl","Partner","https://www.cribl.io/support/","","domains","Cribl","Cribl","Cribl","The [Cribl](https://cribl.io/accelerate-cloud-migration/) connector allows you to easily connect your Cribl (Cribl Enterprise Edition - Standalone) logs with Microsoft Sentinel. 
This gives you more security insight into your organization's data pipelines.","[{""title"": ""Installation and setup instructions for Cribl Stream for Microsoft Sentinel"", ""description"": ""Use the documentation from this Github repository and configure Cribl Stream using \n\nhttps://docs.cribl.io/stream/usecase-azure-workspace/""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cribl/Data%20Connectors/Connector_Cribl.json","true" +"CriblAudit_CL","Cribl","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cribl","criblinc1673975616879","microsoft-sentinel-solution-cribl","2024-08-01","2024-09-05","","Cribl","Partner","https://www.cribl.io/support/","","domains","Cribl","Cribl","Cribl","The [Cribl](https://cribl.io/accelerate-cloud-migration/) connector allows you to easily connect your Cribl (Cribl Enterprise Edition - Standalone) logs with Microsoft Sentinel. 
This gives you more security insight into your organization's data pipelines.","[{""title"": ""Installation and setup instructions for Cribl Stream for Microsoft Sentinel"", ""description"": ""Use the documentation from this Github repository and configure Cribl Stream using \n\nhttps://docs.cribl.io/stream/usecase-azure-workspace/""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cribl/Data%20Connectors/Connector_Cribl.json","true" +"CriblInternal_CL","Cribl","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cribl","criblinc1673975616879","microsoft-sentinel-solution-cribl","2024-08-01","2024-09-05","","Cribl","Partner","https://www.cribl.io/support/","","domains","Cribl","Cribl","Cribl","The [Cribl](https://cribl.io/accelerate-cloud-migration/) connector allows you to easily connect your Cribl (Cribl Enterprise Edition - Standalone) logs with Microsoft Sentinel. 
This gives you more security insight into your organization's data pipelines.","[{""title"": ""Installation and setup instructions for Cribl Stream for Microsoft Sentinel"", ""description"": ""Use the documentation from this Github repository and configure Cribl Stream using \n\nhttps://docs.cribl.io/stream/usecase-azure-workspace/""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cribl/Data%20Connectors/Connector_Cribl.json","true" +"CriblUIAccess_CL","Cribl","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cribl","criblinc1673975616879","microsoft-sentinel-solution-cribl","2024-08-01","2024-09-05","","Cribl","Partner","https://www.cribl.io/support/","","domains","Cribl","Cribl","Cribl","The [Cribl](https://cribl.io/accelerate-cloud-migration/) connector allows you to easily connect your Cribl (Cribl Enterprise Edition - Standalone) logs with Microsoft Sentinel. 
This gives you more security insight into your organization's data pipelines.","[{""title"": ""Installation and setup instructions for Cribl Stream for Microsoft Sentinel"", ""description"": ""Use the documentation from this Github repository and configure Cribl Stream using \n\nhttps://docs.cribl.io/stream/usecase-azure-workspace/""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cribl/Data%20Connectors/Connector_Cribl.json","true" +"CrowdStrikeAlerts","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeAPICCPDefinition","Microsoft","CrowdStrike API Data Connector (via Codeless Connector Framework)","The [CrowdStrike Data Connector](https://www.crowdstrike.com/) allows ingesting logs from the CrowdStrike API into Microsoft Sentinel. This connector is built on the Microsoft Sentinel Codeless Connector Platform and uses the CrowdStrike API to fetch logs for Alerts, Detections, Hosts, Incidents, and Vulnerabilities. 
It supports DCR-based ingestion time transformations so that queries can run more efficiently.","[{""title"": ""Configuration steps for the CrowdStrike API"", ""description"": ""Follow the instructions below to obtain your CrowdStrike API credentials."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\nLog in to your CrowdStrike Console and navigate to the API section to copy your Base API URL.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve Client Credentials\nObtain your Client ID and Client Secret from the API credentials section in your CrowdStrike account.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api.us-2.crowdstrike.com"", ""type"": ""text"", ""name"": ""apiUrl"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client ID"", ""placeholder"": ""Your Client ID"", ""type"": ""text"", ""name"": ""clientId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client Secret"", ""placeholder"": ""Your Client Secret"", ""type"": ""password"", ""name"": ""clientSecret"", ""validations"": {""required"": true}}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""disconnectLabel"": ""Disconnect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeAPI_ccp/CrowdStrikeAPI_Definition.json","true" +"CrowdStrikeDetections","CrowdStrike Falcon Endpoint 
Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeAPICCPDefinition","Microsoft","CrowdStrike API Data Connector (via Codeless Connector Framework)","The [CrowdStrike Data Connector](https://www.crowdstrike.com/) allows ingesting logs from the CrowdStrike API into Microsoft Sentinel. This connector is built on the Microsoft Sentinel Codeless Connector Platform and uses the CrowdStrike API to fetch logs for Alerts, Detections, Hosts, Incidents, and Vulnerabilities. It supports DCR-based ingestion time transformations so that queries can run more efficiently.","[{""title"": ""Configuration steps for the CrowdStrike API"", ""description"": ""Follow the instructions below to obtain your CrowdStrike API credentials."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\nLog in to your CrowdStrike Console and navigate to the API section to copy your Base API URL.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Retrieve Client Credentials\nObtain your Client ID and Client Secret from the API credentials section in your CrowdStrike account.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api.us-2.crowdstrike.com"", ""type"": ""text"", ""name"": ""apiUrl"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client ID"", ""placeholder"": ""Your Client ID"", ""type"": ""text"", ""name"": ""clientId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client Secret"", ""placeholder"": ""Your Client Secret"", ""type"": ""password"", ""name"": ""clientSecret"", ""validations"": {""required"": true}}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""disconnectLabel"": ""Disconnect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeAPI_ccp/CrowdStrikeAPI_Definition.json","true" +"CrowdStrikeHosts","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeAPICCPDefinition","Microsoft","CrowdStrike API Data Connector (via Codeless Connector Framework)","The [CrowdStrike Data Connector](https://www.crowdstrike.com/) allows ingesting logs from the CrowdStrike API into Microsoft Sentinel. 
This connector is built on the Microsoft Sentinel Codeless Connector Platform and uses the CrowdStrike API to fetch logs for Alerts, Detections, Hosts, Incidents, and Vulnerabilities. It supports DCR-based ingestion time transformations so that queries can run more efficiently.","[{""title"": ""Configuration steps for the CrowdStrike API"", ""description"": ""Follow the instructions below to obtain your CrowdStrike API credentials."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\nLog in to your CrowdStrike Console and navigate to the API section to copy your Base API URL.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve Client Credentials\nObtain your Client ID and Client Secret from the API credentials section in your CrowdStrike account.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api.us-2.crowdstrike.com"", ""type"": ""text"", ""name"": ""apiUrl"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client ID"", ""placeholder"": ""Your Client ID"", ""type"": ""text"", ""name"": ""clientId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client Secret"", ""placeholder"": ""Your Client Secret"", ""type"": ""password"", ""name"": ""clientSecret"", ""validations"": {""required"": true}}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""disconnectLabel"": ""Disconnect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": 
false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeAPI_ccp/CrowdStrikeAPI_Definition.json","true" +"CrowdStrikeIncidents","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeAPICCPDefinition","Microsoft","CrowdStrike API Data Connector (via Codeless Connector Framework)","The [CrowdStrike Data Connector](https://www.crowdstrike.com/) allows ingesting logs from the CrowdStrike API into Microsoft Sentinel. This connector is built on the Microsoft Sentinel Codeless Connector Platform and uses the CrowdStrike API to fetch logs for Alerts, Detections, Hosts, Incidents, and Vulnerabilities. It supports DCR-based ingestion time transformations so that queries can run more efficiently.","[{""title"": ""Configuration steps for the CrowdStrike API"", ""description"": ""Follow the instructions below to obtain your CrowdStrike API credentials."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\nLog in to your CrowdStrike Console and navigate to the API section to copy your Base API URL.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Retrieve Client Credentials\nObtain your Client ID and Client Secret from the API credentials section in your CrowdStrike account.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api.us-2.crowdstrike.com"", ""type"": ""text"", ""name"": ""apiUrl"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client ID"", ""placeholder"": ""Your Client ID"", ""type"": ""text"", ""name"": ""clientId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client Secret"", ""placeholder"": ""Your Client Secret"", ""type"": ""password"", ""name"": ""clientSecret"", ""validations"": {""required"": true}}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""disconnectLabel"": ""Disconnect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeAPI_ccp/CrowdStrikeAPI_Definition.json","true" +"CrowdStrikeVulnerabilities","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeAPICCPDefinition","Microsoft","CrowdStrike API Data Connector (via Codeless Connector Framework)","The [CrowdStrike Data Connector](https://www.crowdstrike.com/) allows ingesting logs from the CrowdStrike API into Microsoft Sentinel. 
This connector is built on the Microsoft Sentinel Codeless Connector Platform and uses the CrowdStrike API to fetch logs for Alerts, Detections, Hosts, Incidents, and Vulnerabilities. It supports DCR-based ingestion time transformations so that queries can run more efficiently.","[{""title"": ""Configuration steps for the CrowdStrike API"", ""description"": ""Follow the instructions below to obtain your CrowdStrike API credentials."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\nLog in to your CrowdStrike Console and navigate to the API section to copy your Base API URL.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve Client Credentials\nObtain your Client ID and Client Secret from the API credentials section in your CrowdStrike account.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api.us-2.crowdstrike.com"", ""type"": ""text"", ""name"": ""apiUrl"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client ID"", ""placeholder"": ""Your Client ID"", ""type"": ""text"", ""name"": ""clientId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client Secret"", ""placeholder"": ""Your Client Secret"", ""type"": ""password"", ""name"": ""clientSecret"", ""validations"": {""required"": true}}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""disconnectLabel"": ""Disconnect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": 
false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeAPI_ccp/CrowdStrikeAPI_Definition.json","true" +"ThreatIntelligenceIndicator","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconAdversaryIntelligence","CrowdStrike","CrowdStrike Falcon Adversary Intelligence ","The [CrowdStrike](https://www.crowdstrike.com/) Falcon Indicators of Compromise connector retrieves the Indicators of Compromise from the Falcon Intel API and uploads them [Microsoft Sentinel Threat Intel](https://learn.microsoft.com/en-us/azure/sentinel/understand-threat-intelligence).","[{""title"": """", ""description"": ""**STEP 1 - [Generate CrowdStrike API credentials](https://www.crowdstrike.com/blog/tech-center/get-access-falcon-apis/).**\n""}, {""title"": """", ""description"": ""Make sure 'Indicators (Falcon Intelligence)' scope has 'read' selected""}, {""title"": """", ""description"": ""**STEP 2 - [Register an Entra App](https://learn.microsoft.com/en-us/entra/identity-platform/quickstart-register-app) with client secret.**\n""}, {""title"": """", ""description"": ""Provide the Entra App principal with 'Microsoft Sentinel Contributor' role assignment on the respective log analytics workspace. 
[How to assign roles on Azure](https://learn.microsoft.com/en-us/azure/role-based-access-control/role-assignments-portal).""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the CrowdStrike Falcon Indicator of Compromise connector, have the Workspace ID (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the CrowdStrike Falcon Adversary Intelligence connector connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdStrikeFalconAdversaryIntelligence-azuredeploy)\n2. Provide the following parameters: CrowdStrikeClientId, CrowdStrikeClientSecret, CrowdStrikeBaseUrl, WorkspaceId, TenantId, Indicators, AadClientId, AadClientSecret, LookBackDays""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the CrowdStrike Falcon Adversary Intelligence connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdStrikeFalconAdversaryIntelligence-Functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. 
Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CrowdStrikeFalconIOCXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tCROWDSTRIKE_CLIENT_ID\n\t\tCROWDSTRIKE_CLIENT_SECRET\n\t\tCROWDSTRIKE_BASE_URL\n\t\tTENANT_ID\n\t\tINDICATORS\n\t\tWorkspaceKey\n\t\tAAD_CLIENT_ID\n\t\tAAD_CLIENT_SECRET \n\t\tLOOK_BACK_DAYS \n\t\tWORKSPACE_ID \n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""CrowdStrike API Client ID and Client Secret"", ""description"": ""**CROWDSTRIKE_CLIENT_ID**, **CROWDSTRIKE_CLIENT_SECRET**, **CROWDSTRIKE_BASE_URL**. 
CrowdStrike credentials must have Indicators (Falcon Intelligence) read scope.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeFalconAdversaryIntelligence/CrowdStrikeFalconAdversaryIntelligence_FunctionApp.json","true" +"CommonSecurityLog","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconEndpointProtection","CrowdStrike","[Deprecated] CrowdStrike Falcon Endpoint Protection via Legacy Agent","The [CrowdStrike Falcon Endpoint Protection](https://www.crowdstrike.com/endpoint-security-products/) connector allows you to easily connect your CrowdStrike Falcon Event Stream with Microsoft Sentinel, to create custom dashboards, alerts, and improve investigation. This gives you more insight into your organization's endpoints and improves your security operation capabilities.

NOTE: This data connector has been deprecated; consider moving to the CCP data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Crowd Strike Falcon Endpoint Protection and load the function code or click [here](https://aka.ms/sentinel-crowdstrikefalconendpointprotection-parser), on the second line of the query, enter the hostname(s) of your CrowdStrikeFalcon device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. 
You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward CrowdStrike Falcon Event Stream logs to a Syslog agent"", ""description"": ""Deploy the CrowdStrike Falcon SIEM Collector to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent.\n1. [Follow these instructions](https://www.crowdstrike.com/blog/tech-center/integrate-with-your-siem/) to deploy the SIEM Collector and forward syslog\n2. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/Connector_Syslog_CrowdStrikeFalconEndpointProtection.json","true" +"CommonSecurityLog","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconEndpointProtectionAma","CrowdStrike","[Deprecated] CrowdStrike Falcon Endpoint Protection via AMA","The [CrowdStrike Falcon Endpoint Protection](https://www.crowdstrike.com/endpoint-security-products/) connector allows you to easily connect your CrowdStrike Falcon Event Stream with Microsoft Sentinel, to create custom dashboards, alerts, and improve investigation. This gives you more insight into your organization's endpoints and improves your security operation capabilities.

NOTE: This data connector has been deprecated; consider moving to the CCP data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Crowd Strike Falcon Endpoint Protection and load the function code or click [here](https://aka.ms/sentinel-crowdstrikefalconendpointprotection-parser), on the second line of the query, enter the hostname(s) of your CrowdStrikeFalcon device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward CrowdStrike Falcon Event Stream logs to a Syslog agent"", ""description"": ""Deploy the CrowdStrike Falcon SIEM Collector to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent.\n1. 
[Follow these instructions](https://www.crowdstrike.com/blog/tech-center/integrate-with-your-siem/) to deploy the SIEM Collector and forward syslog\n2. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address."", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/template_CrowdStrikeFalconEndpointProtectionAma.json","true" +"CrowdStrike_Additional_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event datainto Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike-managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Requirements: \n In order to use the Falcon Data Replicator feature the following are required: \n 1. **Subscription:** \n 1.1. Falcon Data Replicator. \n 1.2. Falcon Insight XDR. \n 2. **Roles:** \n 2.1. Falcon Administrator.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Setup your CrowdStrike & AWS environments \n To configure access on AWS, use the following two templates provided to set up the AWS environment. This will enable sending logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018Specify template\u2019 option, then \u2018Upload a template file\u2019 by clicking on \u2018Choose file\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018Choose file\u2019 and select the downloaded template. \n 3. Click 'Next' and 'Create stack'.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Make sure that your bucket will be created in the same AWS region as your Falcon CID where the FDR feed is provisioned. 
\n | CrowdStrike region | AWS region | \n |-----------------|-----------|\n | US-1 | us-west-1 |\n | US-2 | us-west-2 | \n | EU-1 | eu-central-1 ""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS CrowdStrike resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CrowdStrike""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Using your own S3 Bucket \n In order to use your own S3 bucket you can refernace the following guide [Use your own S3 bucket](https://falcon.us-2.crowdstrike.com/documentation/page/fa572b1c/falcon-data-replicator#g4f79236) or follow this steps: \n 1. Create support case with the following Name: **Using Self S3 bucket for FDR** \n 2. Add the following information: \n 2.1. The Falcon CID where your FDR feed is provisioned \n 2.2. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.3. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.4. Do not use any partitions. ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Primary Events | data/ |\n | Secondary Events | fdrv2/ ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CrowdstrikeStream"", ""text"": ""Primary Events""}, {""key"": ""Custom-CrowdStrikeSecondary"", ""text"": ""Secondary Events""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" 
+"CrowdStrike_Audit_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event datainto Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike-managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Requirements: \n In order to use the Falcon Data Replicator feature the following are required: \n 1. **Subscription:** \n 1.1. Falcon Data Replicator. \n 1.2. Falcon Insight XDR. \n 2. **Roles:** \n 2.1. Falcon Administrator.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Setup your CrowdStrike & AWS environments \n To configure access on AWS, use the following two templates provided to set up the AWS environment. This will enable sending logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018Specify template\u2019 option, then \u2018Upload a template file\u2019 by clicking on \u2018Choose file\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018Choose file\u2019 and select the downloaded template. \n 3. Click 'Next' and 'Create stack'.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Make sure that your bucket will be created in the same AWS region as your Falcon CID where the FDR feed is provisioned. 
\n | CrowdStrike region | AWS region | \n |-----------------|-----------|\n | US-1 | us-west-1 |\n | US-2 | us-west-2 | \n | EU-1 | eu-central-1 ""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS CrowdStrike resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CrowdStrike""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Using your own S3 Bucket \n In order to use your own S3 bucket you can refernace the following guide [Use your own S3 bucket](https://falcon.us-2.crowdstrike.com/documentation/page/fa572b1c/falcon-data-replicator#g4f79236) or follow this steps: \n 1. Create support case with the following Name: **Using Self S3 bucket for FDR** \n 2. Add the following information: \n 2.1. The Falcon CID where your FDR feed is provisioned \n 2.2. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.3. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.4. Do not use any partitions. ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Primary Events | data/ |\n | Secondary Events | fdrv2/ ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CrowdstrikeStream"", ""text"": ""Primary Events""}, {""key"": ""Custom-CrowdStrikeSecondary"", ""text"": ""Secondary Events""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" 
+"CrowdStrike_Auth_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event datainto Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike-managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Requirements: \n In order to use the Falcon Data Replicator feature the following are required: \n 1. **Subscription:** \n 1.1. Falcon Data Replicator. \n 1.2. Falcon Insight XDR. \n 2. **Roles:** \n 2.1. Falcon Administrator.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Setup your CrowdStrike & AWS environments \n To configure access on AWS, use the following two templates provided to set up the AWS environment. This will enable sending logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018Specify template\u2019 option, then \u2018Upload a template file\u2019 by clicking on \u2018Choose file\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018Choose file\u2019 and select the downloaded template. \n 3. Click 'Next' and 'Create stack'.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Make sure that your bucket will be created in the same AWS region as your Falcon CID where the FDR feed is provisioned. 
\n | CrowdStrike region | AWS region | \n |-----------------|-----------|\n | US-1 | us-west-1 |\n | US-2 | us-west-2 | \n | EU-1 | eu-central-1 ""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS CrowdStrike resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CrowdStrike""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Using your own S3 Bucket \n In order to use your own S3 bucket you can refernace the following guide [Use your own S3 bucket](https://falcon.us-2.crowdstrike.com/documentation/page/fa572b1c/falcon-data-replicator#g4f79236) or follow this steps: \n 1. Create support case with the following Name: **Using Self S3 bucket for FDR** \n 2. Add the following information: \n 2.1. The Falcon CID where your FDR feed is provisioned \n 2.2. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.3. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.4. Do not use any partitions. ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Primary Events | data/ |\n | Secondary Events | fdrv2/ ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CrowdstrikeStream"", ""text"": ""Primary Events""}, {""key"": ""Custom-CrowdStrikeSecondary"", ""text"": ""Secondary Events""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" 
+"CrowdStrike_DNS_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event datainto Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike-managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Requirements: \n In order to use the Falcon Data Replicator feature the following are required: \n 1. **Subscription:** \n 1.1. Falcon Data Replicator. \n 1.2. Falcon Insight XDR. \n 2. **Roles:** \n 2.1. Falcon Administrator.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Setup your CrowdStrike & AWS environments \n To configure access on AWS, use the following two templates provided to set up the AWS environment. This will enable sending logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018Specify template\u2019 option, then \u2018Upload a template file\u2019 by clicking on \u2018Choose file\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018Choose file\u2019 and select the downloaded template. \n 3. Click 'Next' and 'Create stack'.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Make sure that your bucket will be created in the same AWS region as your Falcon CID where the FDR feed is provisioned. 
\n | CrowdStrike region | AWS region | \n |-----------------|-----------|\n | US-1 | us-west-1 |\n | US-2 | us-west-2 | \n | EU-1 | eu-central-1 ""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS CrowdStrike resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CrowdStrike""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Using your own S3 Bucket \n In order to use your own S3 bucket you can refernace the following guide [Use your own S3 bucket](https://falcon.us-2.crowdstrike.com/documentation/page/fa572b1c/falcon-data-replicator#g4f79236) or follow this steps: \n 1. Create support case with the following Name: **Using Self S3 bucket for FDR** \n 2. Add the following information: \n 2.1. The Falcon CID where your FDR feed is provisioned \n 2.2. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.3. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.4. Do not use any partitions. ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Primary Events | data/ |\n | Secondary Events | fdrv2/ ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CrowdstrikeStream"", ""text"": ""Primary Events""}, {""key"": ""Custom-CrowdStrikeSecondary"", ""text"": ""Secondary Events""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" 
+"CrowdStrike_File_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event datainto Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike-managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Requirements: \n In order to use the Falcon Data Replicator feature the following are required: \n 1. **Subscription:** \n 1.1. Falcon Data Replicator. \n 1.2. Falcon Insight XDR. \n 2. **Roles:** \n 2.1. Falcon Administrator.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Setup your CrowdStrike & AWS environments \n To configure access on AWS, use the following two templates provided to set up the AWS environment. This will enable sending logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018Specify template\u2019 option, then \u2018Upload a template file\u2019 by clicking on \u2018Choose file\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018Choose file\u2019 and select the downloaded template. \n 3. Click 'Next' and 'Create stack'.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Make sure that your bucket will be created in the same AWS region as your Falcon CID where the FDR feed is provisioned. 
\n | CrowdStrike region | AWS region | \n |-----------------|-----------|\n | US-1 | us-west-1 |\n | US-2 | us-west-2 | \n | EU-1 | eu-central-1 ""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS CrowdStrike resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CrowdStrike""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Using your own S3 Bucket \n In order to use your own S3 bucket you can refernace the following guide [Use your own S3 bucket](https://falcon.us-2.crowdstrike.com/documentation/page/fa572b1c/falcon-data-replicator#g4f79236) or follow this steps: \n 1. Create support case with the following Name: **Using Self S3 bucket for FDR** \n 2. Add the following information: \n 2.1. The Falcon CID where your FDR feed is provisioned \n 2.2. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.3. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.4. Do not use any partitions. ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Primary Events | data/ |\n | Secondary Events | fdrv2/ ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CrowdstrikeStream"", ""text"": ""Primary Events""}, {""key"": ""Custom-CrowdStrikeSecondary"", ""text"": ""Secondary Events""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" 
+"CrowdStrike_Network_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event datainto Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike-managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Requirements: \n In order to use the Falcon Data Replicator feature the following are required: \n 1. **Subscription:** \n 1.1. Falcon Data Replicator. \n 1.2. Falcon Insight XDR. \n 2. **Roles:** \n 2.1. Falcon Administrator.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Setup your CrowdStrike & AWS environments \n To configure access on AWS, use the following two templates provided to set up the AWS environment. This will enable sending logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018Specify template\u2019 option, then \u2018Upload a template file\u2019 by clicking on \u2018Choose file\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018Choose file\u2019 and select the downloaded template. \n 3. Click 'Next' and 'Create stack'.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Make sure that your bucket will be created in the same AWS region as your Falcon CID where the FDR feed is provisioned. 
\n | CrowdStrike region | AWS region | \n |-----------------|-----------|\n | US-1 | us-west-1 |\n | US-2 | us-west-2 | \n | EU-1 | eu-central-1 ""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS CrowdStrike resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CrowdStrike""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Using your own S3 Bucket \n In order to use your own S3 bucket you can refernace the following guide [Use your own S3 bucket](https://falcon.us-2.crowdstrike.com/documentation/page/fa572b1c/falcon-data-replicator#g4f79236) or follow this steps: \n 1. Create support case with the following Name: **Using Self S3 bucket for FDR** \n 2. Add the following information: \n 2.1. The Falcon CID where your FDR feed is provisioned \n 2.2. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.3. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.4. Do not use any partitions. ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Primary Events | data/ |\n | Secondary Events | fdrv2/ ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CrowdstrikeStream"", ""text"": ""Primary Events""}, {""key"": ""Custom-CrowdStrikeSecondary"", ""text"": ""Secondary Events""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" 
+"CrowdStrike_Process_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event datainto Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike-managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Requirements: \n In order to use the Falcon Data Replicator feature the following are required: \n 1. **Subscription:** \n 1.1. Falcon Data Replicator. \n 1.2. Falcon Insight XDR. \n 2. **Roles:** \n 2.1. Falcon Administrator.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Setup your CrowdStrike & AWS environments \n To configure access on AWS, use the following two templates provided to set up the AWS environment. This will enable sending logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018Specify template\u2019 option, then \u2018Upload a template file\u2019 by clicking on \u2018Choose file\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018Choose file\u2019 and select the downloaded template. \n 3. Click 'Next' and 'Create stack'.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Make sure that your bucket will be created in the same AWS region as your Falcon CID where the FDR feed is provisioned. 
\n | CrowdStrike region | AWS region | \n |-----------------|-----------|\n | US-1 | us-west-1 |\n | US-2 | us-west-2 | \n | EU-1 | eu-central-1 ""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS CrowdStrike resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CrowdStrike""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Using your own S3 Bucket \n In order to use your own S3 bucket you can refernace the following guide [Use your own S3 bucket](https://falcon.us-2.crowdstrike.com/documentation/page/fa572b1c/falcon-data-replicator#g4f79236) or follow this steps: \n 1. Create support case with the following Name: **Using Self S3 bucket for FDR** \n 2. Add the following information: \n 2.1. The Falcon CID where your FDR feed is provisioned \n 2.2. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.3. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.4. Do not use any partitions. ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Primary Events | data/ |\n | Secondary Events | fdrv2/ ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CrowdstrikeStream"", ""text"": ""Primary Events""}, {""key"": ""Custom-CrowdStrikeSecondary"", ""text"": ""Secondary Events""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" 
+"CrowdStrike_Registry_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event datainto Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike-managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Requirements: \n In order to use the Falcon Data Replicator feature the following are required: \n 1. **Subscription:** \n 1.1. Falcon Data Replicator. \n 1.2. Falcon Insight XDR. \n 2. **Roles:** \n 2.1. Falcon Administrator.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Setup your CrowdStrike & AWS environments \n To configure access on AWS, use the following two templates provided to set up the AWS environment. This will enable sending logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018Specify template\u2019 option, then \u2018Upload a template file\u2019 by clicking on \u2018Choose file\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018Choose file\u2019 and select the downloaded template. \n 3. Click 'Next' and 'Create stack'.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Make sure that your bucket will be created in the same AWS region as your Falcon CID where the FDR feed is provisioned. 
\n | CrowdStrike region | AWS region | \n |-----------------|-----------|\n | US-1 | us-west-1 |\n | US-2 | us-west-2 | \n | EU-1 | eu-central-1 ""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS CrowdStrike resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CrowdStrike""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Using your own S3 Bucket \n In order to use your own S3 bucket you can refernace the following guide [Use your own S3 bucket](https://falcon.us-2.crowdstrike.com/documentation/page/fa572b1c/falcon-data-replicator#g4f79236) or follow this steps: \n 1. Create support case with the following Name: **Using Self S3 bucket for FDR** \n 2. Add the following information: \n 2.1. The Falcon CID where your FDR feed is provisioned \n 2.2. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.3. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.4. Do not use any partitions. ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Primary Events | data/ |\n | Secondary Events | fdrv2/ ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CrowdstrikeStream"", ""text"": ""Primary Events""}, {""key"": ""Custom-CrowdStrikeSecondary"", ""text"": ""Secondary Events""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" 
+"CrowdStrike_Secondary_Data_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event datainto Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike-managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Requirements: \n In order to use the Falcon Data Replicator feature the following are required: \n 1. **Subscription:** \n 1.1. Falcon Data Replicator. \n 1.2. Falcon Insight XDR. \n 2. **Roles:** \n 2.1. Falcon Administrator.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Setup your CrowdStrike & AWS environments \n To configure access on AWS, use the following two templates provided to set up the AWS environment. This will enable sending logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018Specify template\u2019 option, then \u2018Upload a template file\u2019 by clicking on \u2018Choose file\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018Choose file\u2019 and select the downloaded template. \n 3. Click 'Next' and 'Create stack'.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Make sure that your bucket will be created in the same AWS region as your Falcon CID where the FDR feed is provisioned. 
\n | CrowdStrike region | AWS region | \n |-----------------|-----------|\n | US-1 | us-west-1 |\n | US-2 | us-west-2 | \n | EU-1 | eu-central-1 ""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS CrowdStrike resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CrowdStrike""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Using your own S3 Bucket \n In order to use your own S3 bucket you can refernace the following guide [Use your own S3 bucket](https://falcon.us-2.crowdstrike.com/documentation/page/fa572b1c/falcon-data-replicator#g4f79236) or follow this steps: \n 1. Create support case with the following Name: **Using Self S3 bucket for FDR** \n 2. Add the following information: \n 2.1. The Falcon CID where your FDR feed is provisioned \n 2.2. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.3. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.4. Do not use any partitions. ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Primary Events | data/ |\n | Secondary Events | fdrv2/ ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CrowdstrikeStream"", ""text"": ""Primary Events""}, {""key"": ""Custom-CrowdStrikeSecondary"", ""text"": ""Secondary Events""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" 
+"CrowdStrike_User_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdStrikeFalconS3CCPDefinition","Microsoft","CrowdStrike Falcon Data Replicator (AWS S3) (via Codeless Connector Framework)","The Crowdstrike Falcon Data Replicator (S3) connector provides the capability to ingest FDR event datainto Microsoft Sentinel from the AWS S3 bucket where the FDR logs have been streamed. The connector provides ability to get events from Falcon Agents which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector requires an IAM role to be configured on AWS to allow access to the AWS S3 bucket and may not be suitable for environments that leverage CrowdStrike-managed buckets.

3. For environments that leverage CrowdStrike-managed buckets, please configure the CrowdStrike Falcon Data Replicator (CrowdStrike-Managed AWS S3) connector.

","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Requirements: \n In order to use the Falcon Data Replicator feature the following are required: \n 1. **Subscription:** \n 1.1. Falcon Data Replicator. \n 1.2. Falcon Insight XDR. \n 2. **Roles:** \n 2.1. Falcon Administrator.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Setup your CrowdStrike & AWS environments \n To configure access on AWS, use the following two templates provided to set up the AWS environment. This will enable sending logs from an S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create). \n 2. Choose the \u2018Specify template\u2019 option, then \u2018Upload a template file\u2019 by clicking on \u2018Choose file\u2019 and selecting the appropriate CloudFormation template file provided below. click \u2018Choose file\u2019 and select the downloaded template. \n 3. Click 'Next' and 'Create stack'.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Make sure that your bucket will be created in the same AWS region as your Falcon CID where the FDR feed is provisioned. 
\n | CrowdStrike region | AWS region | \n |-----------------|-----------|\n | US-1 | us-west-1 |\n | US-2 | us-west-2 | \n | EU-1 | eu-central-1 ""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS CrowdStrike resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CrowdStrike""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Using your own S3 Bucket \n In order to use your own S3 bucket you can refernace the following guide [Use your own S3 bucket](https://falcon.us-2.crowdstrike.com/documentation/page/fa572b1c/falcon-data-replicator#g4f79236) or follow this steps: \n 1. Create support case with the following Name: **Using Self S3 bucket for FDR** \n 2. Add the following information: \n 2.1. The Falcon CID where your FDR feed is provisioned \n 2.2. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.3. Indicate which types of events you wish to have provided in this new FDR feed. \n 2.4. Do not use any partitions. ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Primary Events | data/ |\n | Secondary Events | fdrv2/ ""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CrowdstrikeStream"", ""text"": ""Primary Events""}, {""key"": ""Custom-CrowdStrikeSecondary"", ""text"": ""Secondary Events""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdStrikeS3FDR_ccp/DataConnectorDefinition.json","true" +"ASimAuditEventLogs","CrowdStrike 
Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR.\n\t - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. \n\t - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. \n2. Register AAD application - For DCR to authentiate to ingest data into log analytics, you must use AAD application. \n\t - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. 
\n\t - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page.""}, {""title"": ""Deployment Options"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy DCE, DCR and Custom Tables for data ingestion**\n\n1. 
Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) \n2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment).\n\t - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3).\n\t - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Step 2).""}, {""title"": """", ""description"": ""**2. Deploy a Function App**\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAWS_KEY\n\t\tAWS_SECRET\n\t\tAWS_REGION_NAME\n\t\tQUEUE_URL\n\t\tUSER_SELECTION_REQUIRE_RAW //True if raw data is required\n\t\tUSER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required\n\t\tMAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium\n\t\tMAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here\n\t\tAZURE_TENANT_ID\n\t\tAZURE_CLIENT_ID\n\t\tAZURE_CLIENT_SECRET\n\t\tDCE_INGESTION_ENDPOINT\n\t\tNORMALIZED_DCR_ID\n\t\tRAW_DATA_DCR_ID\n\t\tEVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet\n\t\tREQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet\n\t\tSchedule //Add value as '0 */1 * * * *' to ensure the function runs every minute.\n5. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" +"ASimAuthenticationEventLogs","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR.\n\t - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. \n\t - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. \n2. Register AAD application - For DCR to authenticate to ingest data into log analytics, you must use AAD application. \n\t - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. 
\n\t - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page.""}, {""title"": ""Deployment Options"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy DCE, DCR and Custom Tables for data ingestion**\n\n1. 
Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) \n2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment).\n\t - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3).\n\t - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Step 2).""}, {""title"": """", ""description"": ""**2. Deploy a Function App**\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAWS_KEY\n\t\tAWS_SECRET\n\t\tAWS_REGION_NAME\n\t\tQUEUE_URL\n\t\tUSER_SELECTION_REQUIRE_RAW //True if raw data is required\n\t\tUSER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required\n\t\tMAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium\n\t\tMAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here\n\t\tAZURE_TENANT_ID\n\t\tAZURE_CLIENT_ID\n\t\tAZURE_CLIENT_SECRET\n\t\tDCE_INGESTION_ENDPOINT\n\t\tNORMALIZED_DCR_ID\n\t\tRAW_DATA_DCR_ID\n\t\tEVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet\n\t\tREQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet\n\t\tSchedule //Add value as '0 */1 * * * *' to ensure the function runs every minute.\n5. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" +"ASimAuthenticationEventLogs_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR.\n\t - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. \n\t - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. \n2. Register AAD application - For DCR to authenticate to ingest data into log analytics, you must use AAD application. \n\t - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. 
\n\t - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page.""}, {""title"": ""Deployment Options"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy DCE, DCR and Custom Tables for data ingestion**\n\n1. 
Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) \n2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment).\n\t - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3).\n\t - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Step 2).""}, {""title"": """", ""description"": ""**2. Deploy a Function App**\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAWS_KEY\n\t\tAWS_SECRET\n\t\tAWS_REGION_NAME\n\t\tQUEUE_URL\n\t\tUSER_SELECTION_REQUIRE_RAW //True if raw data is required\n\t\tUSER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required\n\t\tMAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium\n\t\tMAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here\n\t\tAZURE_TENANT_ID\n\t\tAZURE_CLIENT_ID\n\t\tAZURE_CLIENT_SECRET\n\t\tDCE_INGESTION_ENDPOINT\n\t\tNORMALIZED_DCR_ID\n\t\tRAW_DATA_DCR_ID\n\t\tEVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet\n\t\tREQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet\n\t\tSchedule //Add value as '0 */1 * * * *' to ensure the function runs every minute.\n5. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" +"ASimDnsActivityLogs","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR.\n\t - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. \n\t - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. \n2. Register AAD application - For DCR to authenticate to ingest data into log analytics, you must use AAD application. \n\t - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. 
\n\t - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page.""}, {""title"": ""Deployment Options"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy DCE, DCR and Custom Tables for data ingestion**\n\n1. 
Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) \n2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment).\n\t - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3).\n\t - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Step 2).""}, {""title"": """", ""description"": ""**2. Deploy a Function App**\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAWS_KEY\n\t\tAWS_SECRET\n\t\tAWS_REGION_NAME\n\t\tQUEUE_URL\n\t\tUSER_SELECTION_REQUIRE_RAW //True if raw data is required\n\t\tUSER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required\n\t\tMAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium\n\t\tMAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here\n\t\tAZURE_TENANT_ID\n\t\tAZURE_CLIENT_ID\n\t\tAZURE_CLIENT_SECRET\n\t\tDCE_INGESTION_ENDPOINT\n\t\tNORMALIZED_DCR_ID\n\t\tRAW_DATA_DCR_ID\n\t\tEVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet\n\t\tREQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet\n\t\tSchedule //Add value as '0 */1 * * * *' to ensure the function runs every minute.\n5. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" +"ASimFileEventLogs","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR.\n\t - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. \n\t - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. \n2. Register AAD application - For DCR to authenticate to ingest data into log analytics, you must use AAD application. \n\t - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. 
\n\t - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page.""}, {""title"": ""Deployment Options"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy DCE, DCR and Custom Tables for data ingestion**\n\n1. 
Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) \n2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment).\n\t - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3).\n\t - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Step 2).""}, {""title"": """", ""description"": ""**2. Deploy a Function App**\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAWS_KEY\n\t\tAWS_SECRET\n\t\tAWS_REGION_NAME\n\t\tQUEUE_URL\n\t\tUSER_SELECTION_REQUIRE_RAW //True if raw data is required\n\t\tUSER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required\n\t\tMAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium\n\t\tMAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here\n\t\tAZURE_TENANT_ID\n\t\tAZURE_CLIENT_ID\n\t\tAZURE_CLIENT_SECRET\n\t\tDCE_INGESTION_ENDPOINT\n\t\tNORMALIZED_DCR_ID\n\t\tRAW_DATA_DCR_ID\n\t\tEVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet\n\t\tREQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet\n\t\tSchedule //Add value as '0 */1 * * * *' to ensure the function runs every minute.\n5. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" +"ASimFileEventLogs_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR.\n\t - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. \n\t - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. \n2. Register AAD application - For DCR to authenticate to ingest data into log analytics, you must use AAD application. \n\t - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. 
\n\t - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page.""}, {""title"": ""Deployment Options"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy DCE, DCR and Custom Tables for data ingestion**\n\n1. 
Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) \n2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment).\n\t - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3).\n\t - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Step 2).""}, {""title"": """", ""description"": ""**2. Deploy a Function App**\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAWS_KEY\n\t\tAWS_SECRET\n\t\tAWS_REGION_NAME\n\t\tQUEUE_URL\n\t\tUSER_SELECTION_REQUIRE_RAW //True if raw data is required\n\t\tUSER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required\n\t\tMAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium\n\t\tMAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here\n\t\tAZURE_TENANT_ID\n\t\tAZURE_CLIENT_ID\n\t\tAZURE_CLIENT_SECRET\n\t\tDCE_INGESTION_ENDPOINT\n\t\tNORMALIZED_DCR_ID\n\t\tRAW_DATA_DCR_ID\n\t\tEVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet\n\t\tREQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet\n\t\tSchedule //Add value as '0 */1 * * * *' to ensure the function runs every minute.\n5. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" +"ASimNetworkSessionLogs","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR.\n\t - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. \n\t - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. \n2. Register AAD application - For DCR to authenticate to ingest data into log analytics, you must use AAD application. \n\t - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. 
\n\t - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page.""}, {""title"": ""Deployment Options"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy DCE, DCR and Custom Tables for data ingestion**\n\n1. 
Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) \n2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment).\n\t - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3).\n\t - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Step 2).""}, {""title"": """", ""description"": ""**2. Deploy a Function App**\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAWS_KEY\n\t\tAWS_SECRET\n\t\tAWS_REGION_NAME\n\t\tQUEUE_URL\n\t\tUSER_SELECTION_REQUIRE_RAW //True if raw data is required\n\t\tUSER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required\n\t\tMAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium\n\t\tMAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here\n\t\tAZURE_TENANT_ID\n\t\tAZURE_CLIENT_ID\n\t\tAZURE_CLIENT_SECRET\n\t\tDCE_INGESTION_ENDPOINT\n\t\tNORMALIZED_DCR_ID\n\t\tRAW_DATA_DCR_ID\n\t\tEVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet\n\t\tREQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet\n\t\tSchedule //Add value as '0 */1 * * * *' to ensure the function runs every minute.\n5. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" +"ASimProcessEventLogs","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR.\n\t - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. \n\t - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. \n2. Register AAD application - For DCR to authenticate to ingest data into log analytics, you must use AAD application. \n\t - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. 
\n\t - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page.""}, {""title"": ""Deployment Options"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy DCE, DCR and Custom Tables for data ingestion**\n\n1. 
Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) \n2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment).\n\t - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3).\n\t - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Step 2).""}, {""title"": """", ""description"": ""**2. Deploy a Function App**\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAWS_KEY\n\t\tAWS_SECRET\n\t\tAWS_REGION_NAME\n\t\tQUEUE_URL\n\t\tUSER_SELECTION_REQUIRE_RAW //True if raw data is required\n\t\tUSER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required\n\t\tMAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium\n\t\tMAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here\n\t\tAZURE_TENANT_ID\n\t\tAZURE_CLIENT_ID\n\t\tAZURE_CLIENT_SECRET\n\t\tDCE_INGESTION_ENDPOINT\n\t\tNORMALIZED_DCR_ID\n\t\tRAW_DATA_DCR_ID\n\t\tEVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet\n\t\tREQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet\n\t\tSchedule //Add value as '0 */1 * * * *' to ensure the function runs every minute.\n5. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" +"ASimProcessEventLogs_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR.\n\t - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. \n\t - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. \n2. Register AAD application - For DCR to authentiate to ingest data into log analytics, you must use AAD application. \n\t - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. 
\n\t - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page.""}, {""title"": ""Deployment Options"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy DCE, DCR and Custom Tables for data ingestion**\n\n1. 
Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) \n2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment).\n\t - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3).\n\t - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Step 2).""}, {""title"": """", ""description"": ""**2. Deploy a Function App**\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAWS_KEY\n\t\tAWS_SECRET\n\t\tAWS_REGION_NAME\n\t\tQUEUE_URL\n\t\tUSER_SELECTION_REQUIRE_RAW //True if raw data is required\n\t\tUSER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required\n\t\tMAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium\n\t\tMAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here\n\t\tAZURE_TENANT_ID\n\t\tAZURE_CLIENT_ID\n\t\tAZURE_CLIENT_SECRET\n\t\tDCE_INGESTION_ENDPOINT\n\t\tNORMALIZED_DCR_ID\n\t\tRAW_DATA_DCR_ID\n\t\tEVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet\n\t\tREQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet\n\t\tSchedule //Add value as '0 */1 * * * *' to ensure the function runs every minute.\n5. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" +"ASimRegistryEventLogs","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR.\n\t - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. \n\t - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. \n2. Register AAD application - For DCR to authentiate to ingest data into log analytics, you must use AAD application. \n\t - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. 
\n\t - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page.""}, {""title"": ""Deployment Options"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy DCE, DCR and Custom Tables for data ingestion**\n\n1. 
Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) \n2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment).\n\t - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3).\n\t - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Step 2).""}, {""title"": """", ""description"": ""**2. Deploy a Function App**\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAWS_KEY\n\t\tAWS_SECRET\n\t\tAWS_REGION_NAME\n\t\tQUEUE_URL\n\t\tUSER_SELECTION_REQUIRE_RAW //True if raw data is required\n\t\tUSER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required\n\t\tMAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium\n\t\tMAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here\n\t\tAZURE_TENANT_ID\n\t\tAZURE_CLIENT_ID\n\t\tAZURE_CLIENT_SECRET\n\t\tDCE_INGESTION_ENDPOINT\n\t\tNORMALIZED_DCR_ID\n\t\tRAW_DATA_DCR_ID\n\t\tEVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet\n\t\tREQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet\n\t\tSchedule //Add value as '0 */1 * * * *' to ensure the function runs every minute.\n5. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" +"ASimRegistryEventLogs_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR.\n\t - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. \n\t - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. \n2. Register AAD application - For DCR to authentiate to ingest data into log analytics, you must use AAD application. \n\t - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. 
\n\t - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page.""}, {""title"": ""Deployment Options"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy DCE, DCR and Custom Tables for data ingestion**\n\n1. 
Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) \n2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment).\n\t - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3).\n\t - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Step 2).""}, {""title"": """", ""description"": ""**2. Deploy a Function App**\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAWS_KEY\n\t\tAWS_SECRET\n\t\tAWS_REGION_NAME\n\t\tQUEUE_URL\n\t\tUSER_SELECTION_REQUIRE_RAW //True if raw data is required\n\t\tUSER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required\n\t\tMAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium\n\t\tMAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here\n\t\tAZURE_TENANT_ID\n\t\tAZURE_CLIENT_ID\n\t\tAZURE_CLIENT_SECRET\n\t\tDCE_INGESTION_ENDPOINT\n\t\tNORMALIZED_DCR_ID\n\t\tRAW_DATA_DCR_ID\n\t\tEVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet\n\t\tREQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet\n\t\tSchedule //Add value as '0 */1 * * * *' to ensure the function runs every minute.\n5. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" +"ASimUserManagementActivityLogs","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR.\n\t - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. \n\t - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. \n2. Register AAD application - For DCR to authentiate to ingest data into log analytics, you must use AAD application. \n\t - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. 
\n\t - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page.""}, {""title"": ""Deployment Options"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy DCE, DCR and Custom Tables for data ingestion**\n\n1. 
Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) \n2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment).\n\t - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3).\n\t - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Stpe 2).""}, {""title"": """", ""description"": ""**2. Deploy a Function App**\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select ** New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAWS_KEY\n\t\tAWS_SECRET\n\t\tAWS_REGION_NAME\n\t\tQUEUE_URL\n\t\tUSER_SELECTION_REQUIRE_RAW //True if raw data is required\n\t\tUSER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required\n\t\tMAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium\n\t\tMAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here\n\t\tAZURE_TENANT_ID\n\t\tAZURE_CLIENT_ID\n\t\tAZURE_CLIENT_SECRET\n\t\tDCE_INGESTION_ENDPOINT\n\t\tNORMALIZED_DCR_ID\n\t\tRAW_DATA_DCR_ID\n\t\tEVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet\n\t\tREQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet\n\t\tSchedule //Add value as '0 */1 * * * *' to ensure the function runs every minute.\n5. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" +"ASimUserManagementLogs_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR.\n\t - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. \n\t - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. \n2. Register AAD application - For DCR to authenticate to ingest data into log analytics, you must use AAD application. \n\t - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. 
\n\t - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page.""}, {""title"": ""Deployment Options"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy DCE, DCR and Custom Tables for data ingestion**\n\n1. 
Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) \n2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment).\n\t - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3).\n\t - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Step 2).""}, {""title"": """", ""description"": ""**2. Deploy a Function App**\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAWS_KEY\n\t\tAWS_SECRET\n\t\tAWS_REGION_NAME\n\t\tQUEUE_URL\n\t\tUSER_SELECTION_REQUIRE_RAW //True if raw data is required\n\t\tUSER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required\n\t\tMAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium\n\t\tMAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here\n\t\tAZURE_TENANT_ID\n\t\tAZURE_CLIENT_ID\n\t\tAZURE_CLIENT_SECRET\n\t\tDCE_INGESTION_ENDPOINT\n\t\tNORMALIZED_DCR_ID\n\t\tRAW_DATA_DCR_ID\n\t\tEVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet\n\t\tREQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet\n\t\tSchedule //Add value as '0 */1 * * * *' to ensure the function runs every minute.\n5. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" +"CrowdStrike_Additional_Events_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR.\n\t - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. \n\t - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. \n2. Register AAD application - For DCR to authenticate to ingest data into log analytics, you must use AAD application. \n\t - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. 
\n\t - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page.""}, {""title"": ""Deployment Options"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy DCE, DCR and Custom Tables for data ingestion**\n\n1. 
Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) \n2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment).\n\t - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3).\n\t - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Step 2).""}, {""title"": """", ""description"": ""**2. Deploy a Function App**\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAWS_KEY\n\t\tAWS_SECRET\n\t\tAWS_REGION_NAME\n\t\tQUEUE_URL\n\t\tUSER_SELECTION_REQUIRE_RAW //True if raw data is required\n\t\tUSER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required\n\t\tMAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium\n\t\tMAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here\n\t\tAZURE_TENANT_ID\n\t\tAZURE_CLIENT_ID\n\t\tAZURE_CLIENT_SECRET\n\t\tDCE_INGESTION_ENDPOINT\n\t\tNORMALIZED_DCR_ID\n\t\tRAW_DATA_DCR_ID\n\t\tEVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet\n\t\tREQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet\n\t\tSchedule //Add value as '0 */1 * * * *' to ensure the function runs every minute.\n5. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" +"CrowdStrike_Secondary_Data_CL","CrowdStrike Falcon Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-crowdstrikefalconep","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CrowdstrikeReplicatorv2","Crowdstrike","CrowdStrike Falcon Data Replicator (CrowdStrike Managed AWS-S3)","This connector enables the ingestion of FDR data into Microsoft Sentinel using Azure Functions to support the assessment of potential security risks, analysis of collaboration activities, identification of configuration issues, and other operational insights.

NOTE:

1. CrowdStrike FDR license must be available & enabled.

2. The connector uses a Key & Secret based authentication and is suitable for CrowdStrike Managed buckets.

3. For environments that use a fully owned AWS S3 bucket, Microsoft recommends using the CrowdStrike Falcon Data Replicator (AWS S3) connector.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Configure FDR in CrowdStrike - You must contact the [CrowdStrike support team](https://supportportal.crowdstrike.com/) to enable CrowdStrike FDR.\n\t - Once CrowdStrike FDR is enabled, from the CrowdStrike console, navigate to Support --> API Clients and Keys. \n\t - You need to Create new credentials to copy the AWS Access Key ID, AWS Secret Access Key, SQS Queue URL and AWS Region. \n2. Register AAD application - For DCR to authenticate to ingest data into log analytics, you must use AAD application. \n\t - [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. 
\n\t - For **AAD Principal** Id of this application, access the AAD App through [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and capture Object Id from the application overview page.""}, {""title"": ""Deployment Options"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Crowdstrike Falcon Data Replicator connector V2 using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-gov) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, CrowdStrike AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group. It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Crowdstrike Falcon Data Replicator connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy DCE, DCR and Custom Tables for data ingestion**\n\n1. 
Deploy the required DCE, DCR(s) and the Custom Tables by using the [Data Collection Resource ARM template](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-azuredeploy-data-resource) \n2. After successful deployment of DCE and DCR(s), get the below information and keep it handy (required during Azure Functions app deployment).\n\t - DCE log ingestion - Follow the instructions available at [Create data collection endpoint](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-data-collection-endpoint) (Step 3).\n\t - Immutable Ids of one or more DCRs (as applicable) - Follow the instructions available at [Collect information from the DCR](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#collect-information-from-the-dcr) (Step 2).""}, {""title"": """", ""description"": ""**2. Deploy a Function App**\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CrowdstrikeReplicatorV2-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAWS_KEY\n\t\tAWS_SECRET\n\t\tAWS_REGION_NAME\n\t\tQUEUE_URL\n\t\tUSER_SELECTION_REQUIRE_RAW //True if raw data is required\n\t\tUSER_SELECTION_REQUIRE_SECONDARY //True if secondary data is required\n\t\tMAX_QUEUE_MESSAGES_MAIN_QUEUE // 100 for consumption and 150 for Premium\n\t\tMAX_SCRIPT_EXEC_TIME_MINUTES // add the value of 10 here\n\t\tAZURE_TENANT_ID\n\t\tAZURE_CLIENT_ID\n\t\tAZURE_CLIENT_SECRET\n\t\tDCE_INGESTION_ENDPOINT\n\t\tNORMALIZED_DCR_ID\n\t\tRAW_DATA_DCR_ID\n\t\tEVENT_TO_TABLE_MAPPING_LINK // File is present on github. Add if the file can be accessed using internet\n\t\tREQUIRED_FIELDS_SCHEMA_LINK //File is present on github. Add if the file can be accessed using internet\n\t\tSchedule //Add value as '0 */1 * * * *' to ensure the function runs every minute.\n5. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](https://www.crowdstrike.com/blog/tech-center/intro-to-falcon-data-replicator/). To start, contact CrowdStrike support. At your request they will create a CrowdStrike managed Amazon Web Services (AWS) S3 bucket for short term storage purposes as well as a SQS (simple queue service) account for monitoring changes to the S3 bucket.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CrowdStrike%20Falcon%20Endpoint%20Protection/Data%20Connectors/CrowdstrikeReplicatorCLv2/CrowdstrikeReplicatorV2_ConnectorUI.json","true" +"","CustomLogsAma","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CustomLogsAma","azuresentinel","azure-sentinel-solution-customlogsviaama","2024-07-21","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"CommonSecurityLog","CyberArk Enterprise Password Vault (EPV) Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyberArk%20Enterprise%20Password%20Vault%20%28EPV%29%20Events","cyberark","cyberark_epv_events_mss","2022-05-02","","","Cyberark","Partner","https://www.cyberark.com/services-support/technical-support/","","domains","CyberArk","Cyber-Ark","[Deprecated] CyberArk Enterprise Password Vault (EPV) Events via Legacy Agent","CyberArk Enterprise Password Vault generates an xml Syslog message for every action taken against the Vault. The EPV will send the xml messages through the Microsoft Sentinel.xsl translator to be converted into CEF standard format and sent to a syslog staging server of your choice (syslog-ng, rsyslog). 
The Log Analytics agent installed on your syslog staging server will import the messages into Microsoft Log Analytics. Refer to the [CyberArk documentation](https://docs.cyberark.com/Product-Doc/OnlineHelp/PAS/Latest/en/Content/PASIMP/DV-Integrating-with-SIEM-Applications.htm) for more guidance on SIEM integrations.","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python installed on your machine.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""On the EPV configure the dbparm.ini to send Syslog messages in CEF format to the proxy machine. 
Make sure you to send the logs to port 514 TCP on the machines IP address.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python installed on your machine using the following command: python -version\n\n>\n\n> 2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machines security according to your organizations security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyberArk%20Enterprise%20Password%20Vault%20%28EPV%29%20Events/Data%20Connectors/CyberArk%20Data%20Connector.json","true" +"CommonSecurityLog","CyberArk Enterprise Password Vault (EPV) Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyberArk%20Enterprise%20Password%20Vault%20%28EPV%29%20Events","cyberark","cyberark_epv_events_mss","2022-05-02","","","Cyberark","Partner","https://www.cyberark.com/services-support/technical-support/","","domains","CyberArkAma","Cyber-Ark","[Deprecated] CyberArk Privilege Access Manager (PAM) Events via AMA","CyberArk Privilege Access Manager generates an xml Syslog message for every action taken against the Vault. The PAM will send the xml messages through the Microsoft Sentinel.xsl translator to be converted into CEF standard format and sent to a syslog staging server of your choice (syslog-ng, rsyslog). The Log Analytics agent installed on your syslog staging server will import the messages into Microsoft Log Analytics. Refer to the [CyberArk documentation](https://docs.cyberark.com/privilege-cloud-standard/Latest/en/Content/Privilege%20Cloud/privCloud-connect-siem.htm) for more guidance on SIEM integrations.","[{""title"": """", ""description"": """", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. 
Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""On the EPV configure the dbparm.ini to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machines IP address."", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machines security according to your organizations security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyberArk%20Enterprise%20Password%20Vault%20%28EPV%29%20Events/Data%20Connectors/template_CyberArkAMA.json","true" +"CyberArkAudit","CyberArkAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyberArkAudit","cyberark","cyberark_audit_sentinel","2024-03-01","","","CyberArk Support","Partner","https://www.cyberark.com/services-support/technical-support-contact/","","domains","CyberArkAudit","CyberArk","CyberArkAudit","The [CyberArk Audit](https://docs.cyberark.com/Audit/Latest/en/Content/Resources/_TopNav/cc_Home.htm) data connector provides the capability to retrieve security event logs of the CyberArk Audit service and more events into Microsoft Sentinel through the REST API. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Azure Blob Storage API to pull logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.""}, {""title"": """", ""description"": "">**NOTE:** API authorization key(s) or token(s) are securely stored in Azure Key Vault. 
Azure Key Vault provides a secure mechanism to store and retrieve key values.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the CyberArk Audit SIEM Integration**\n\n Follow the [instructions](https://docs.cyberark.com/audit/latest/en/Content/Audit/isp_Microsoft_Sentinel.htm?tocpath=SIEM%20integrations%7C_____3) to obtain connection details and credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the CyberArk Audit data connector, have the Workspace Name and Workspace Location (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""WorkspaceLocation""], ""label"": ""Workspace Location""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the CyberArk Audit data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CyberArkAuditAPI-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **CyberArkAuditUsername**, **CyberArkAuditPassword**, **CyberArkAuditServerURL** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the CyberArk Audit data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CyberArkAudit-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CyberArkXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.10.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tCyberArkAuditUsername\n\t\tCyberArkAuditPassword\n\t\tCyberArkAuditServerURL\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Audit REST API Connections details and Credentials"", ""description"": ""**OauthUsername**, **OauthPassword**, **WebAppID**, **AuditApiKey**, **IdentityEndpoint** and **AuditApiBaseUrl** are required for making API calls.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyberArkAudit/Data%20Connectors/CyberArkAudit_API_FunctionApp.json","true" +"CyberArk_AuditEvents_CL","CyberArkAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyberArkAudit","cyberark","cyberark_audit_sentinel","2024-03-01","","","CyberArk Support","Partner","https://www.cyberark.com/services-support/technical-support-contact/","","domains","CyberArkAudit","CyberArk","CyberArkAudit","The [CyberArk Audit](https://docs.cyberark.com/Audit/Latest/en/Content/Resources/_TopNav/cc_Home.htm) data connector provides the capability to retrieve security event logs of the CyberArk Audit service and more events into Microsoft Sentinel through the REST API. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Azure Blob Storage API to pull logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.""}, {""title"": """", ""description"": "">**NOTE:** API authorization key(s) or token(s) are securely stored in Azure Key Vault. 
Azure Key Vault provides a secure mechanism to store and retrieve key values.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the CyberArk Audit SIEM Integration**\n\n Follow the [instructions](https://docs.cyberark.com/audit/latest/en/Content/Audit/isp_Microsoft_Sentinel.htm?tocpath=SIEM%20integrations%7C_____3) to obtain connection details and credentials.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the CyberArk Audit data connector, have the Workspace Name and Workspace Location (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""WorkspaceLocation""], ""label"": ""Workspace Location""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the CyberArk Audit data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-CyberArkAuditAPI-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **CyberArkAuditUsername**, **CyberArkAuditPassword**, **CyberArkAuditServerURL** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the CyberArk Audit data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-CyberArkAudit-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CyberArkXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.10.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tCyberArkAuditUsername\n\t\tCyberArkAuditPassword\n\t\tCyberArkAuditServerURL\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Audit REST API Connections details and Credentials"", ""description"": ""**OauthUsername**, **OauthPassword**, **WebAppID**, **AuditApiKey**, **IdentityEndpoint** and **AuditApiBaseUrl** are required for making API calls.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyberArkAudit/Data%20Connectors/CyberArkAudit_API_FunctionApp.json","true" +"","CyberArkEPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyberArkEPM","cyberark","cybr_epm_sentinel","2022-04-10","","","CyberArk Support","Partner","https://www.cyberark.com/services-support/technical-support-contact/","","domains","","","","","","","","false" +"","CybersecurityMaturityModelCertification(CMMC)2.0","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CybersecurityMaturityModelCertification%28CMMC%292.0","azuresentinel","azure-sentinel-solution-cmmcv2","2022-01-06","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"CyberSixgill_Alerts_CL","Cybersixgill-Actionable-Alerts","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cybersixgill-Actionable-Alerts","cybersixgill1657701397011","azure-sentinel-cybersixgill-actionable-alerts","2023-02-27","2024-09-24","","Cybersixgill","Partner","https://www.cybersixgill.com/","","domains","CybersixgillActionableAlerts","Cybersixgill","Cybersixgill Actionable Alerts","Actionable alerts provide customized alerts based on configured assets","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Cybersixgill API to pull Alerts into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Cybersixgill Actionable Alerts data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/senitnel-cybersixgill-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **Client ID**, **Client Secret**, **TimeInterval** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Cybersixgill Actionable Alerts data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. 
Deploy a Function App**\n\n> NOTE:You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cybersixgill-Actionable-Alerts/Data%20Connectors/CybersixgillAlerts.zip?raw=true) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CybersixgillAlertsXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. 
In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tClientID\n\t\tClientSecret\n\t\tPolling\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Client_ID** and **Client_Secret** are required for making API calls.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cybersixgill-Actionable-Alerts/Data%20Connectors/Cybersixgill_FunctionApp.json","true" +"","Cyble Vision","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyble%20Vision","cybleinc1737472004964","cybleinc1737472004964-azure-sentinel-offerid","2025-05-05","","","Cyble Support","Partner","https://cyble.com/talk-to-sales/","","domains","","","","","","","","false" +"SecurityEvent","Cyborg Security HUNTER","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyborg%20Security%20HUNTER","cyborgsecurityinc1689265652101","azure-sentinel-solution-cyborgsecurity-hunter","2023-07-03","2023-09-22","","Cyborg Security","Partner","https://hunter.cyborgsecurity.io/customer-support","","domains","CyborgSecurity_HUNTER","Cyborg Security","Cyborg Security HUNTER Hunt Packages","Cyborg Security is a leading provider of advanced threat hunting solutions, with a mission to empower organizations with cutting-edge technology and collaborative tools to proactively detect and respond to cyber threats. Cyborg Security's flagship offering, the HUNTER Platform, combines powerful analytics, curated threat hunting content, and comprehensive hunt management capabilities to create a dynamic ecosystem for effective threat hunting operations.

Follow the steps to gain access to Cyborg Security's Community and setup the 'Open in Tool' capabilities in the HUNTER Platform.","[{""instructions"": [{""parameters"": {""text"": ""Use the following link to find your Azure Tenant ID How to find your Azure Active Directory tenant ID"", ""visible"": true, ""inline"": true}, ""type"": ""InfoMessage""}, {""parameters"": {""fillWith"": [""workspaceName""], ""label"": ""ResourceGroupName & WorkspaceName"", ""value"": ""{0}""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""WorkspaceID"", ""value"": ""{0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""1. Sign up for Cyborg Security's HUNTER Community Account"", ""description"": ""Cyborg Security offers Community Members access to a subset of the Emerging Threat Collections and hunt packages.\n\nCreate a Free Community Account to get access to Cyborg Security's Hunt Packages: [Sign Up Now!](https://www.cyborgsecurity.com/user-account-creation/)""}, {""title"": ""2. Configure the Open in Tool Feature"", ""description"": ""\n\n1. Navigate to the [Environment](https://hunter.cyborgsecurity.io/environment) section of the HUNTER Platform.\n2. Fill in the **Root URI** of your environment in the section labeled **Microsoft Sentinel**. Replace the placeholders with the IDs and Names of your Subscription, Resource Groups and Workspaces.\n\n https[]()://portal.azure.com#@**AzureTenantID**/blade/Microsoft_OperationsManagementSuite_Workspace/Logs.ReactView/resourceId/%2Fsubscriptions%2F**AzureSubscriptionID**%2Fresourcegroups%2F**ResourceGroupName**%2Fproviders%2Fmicrosoft.operationalinsights%2Fworkspaces%2F<**WorkspaceName**>/\n3. Click **Save**.""}, {""title"": ""3. 
Execute a HUNTER hunt package in Microsoft Sentinel"", ""description"": ""\n\nIdentify a Cyborg Security HUNTER hunt package to deploy and use the **Open In Tool** button to quickly open Microsoft Sentinel and stage the hunting content.\n\n![image](https://7924572.fs1.hubspotusercontent-na1.net/hubfs/7924572/HUNTER/Screenshots/openintool-ms-new.png)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyborg%20Security%20HUNTER/Data%20Connectors/CyborgSecurity_HUNTER.json","true"
The data connector is built on Microsoft Sentinel's Codeless Connector Framework and uses the Cyera's API to fetch Cyera's [DSPM Telemetry](https://www.cyera.com/) once received can be correlated with security events creating custom columns so that queries don't need to parse it again, thus resulting in better performance.","[{""description"": ""Connect to your Cyera DSPM tenant via Personal Access Tokens"", ""instructions"": [{""parameters"": {""label"": ""Cyera Personal Access Token Client ID"", ""name"": ""clientId"", ""placeholder"": ""client_id"", ""type"": ""text""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""Cyera Personal Access Token Secret Key"", ""name"": ""clientSecret"", ""placeholder"": ""secret_key"", ""type"": ""password""}, ""type"": ""Textbox""}, {""parameters"": {""connectLabel"": ""Connect"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Cyera DSPM Authentication""}]","{""resourceProvider"": [{""permissionsDisplayText"": ""Read and Write permissions are required."", ""provider"": ""Microsoft.OperationalInsights/workspaces"", ""providerDisplayName"": ""Workspace"", ""requiredPermissions"": {""action"": false, ""delete"": true, ""read"": true, ""write"": true}, ""scope"": ""Workspace""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_CCF/CyeraDSPMLogs_ConnectorDefinitionCCF.json","true" +"CyeraAssets_MS_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyera1658314682323","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","CyeraDSPMCCF","Cyera Inc","Cyera DSPM Microsoft Sentinel Data Connector","The [Cyera DSPM](https://api.cyera.io/) data connector allows you to connect to your Cyera's DSPM tenant and ingesting Classifications, Assets, Issues, and Identity Resources/Definitions into Microsoft Sentinel. 
The data connector is built on Microsoft Sentinel's Codeless Connector Framework and uses the Cyera's API to fetch Cyera's [DSPM Telemetry](https://www.cyera.com/) once received can be correlated with security events creating custom columns so that queries don't need to parse it again, thus resulting in better performance.","[{""description"": ""Connect to your Cyera DSPM tenant via Personal Access Tokens"", ""instructions"": [{""parameters"": {""label"": ""Cyera Personal Access Token Client ID"", ""name"": ""clientId"", ""placeholder"": ""client_id"", ""type"": ""text""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""Cyera Personal Access Token Secret Key"", ""name"": ""clientSecret"", ""placeholder"": ""secret_key"", ""type"": ""password""}, ""type"": ""Textbox""}, {""parameters"": {""connectLabel"": ""Connect"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Cyera DSPM Authentication""}]","{""resourceProvider"": [{""permissionsDisplayText"": ""Read and Write permissions are required."", ""provider"": ""Microsoft.OperationalInsights/workspaces"", ""providerDisplayName"": ""Workspace"", ""requiredPermissions"": {""action"": false, ""delete"": true, ""read"": true, ""write"": true}, ""scope"": ""Workspace""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_CCF/CyeraDSPMLogs_ConnectorDefinitionCCF.json","true" +"CyeraClassifications_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyera1658314682323","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","CyeraDSPMCCF","Cyera Inc","Cyera DSPM Microsoft Sentinel Data Connector","The [Cyera DSPM](https://api.cyera.io/) data connector allows you to connect to your Cyera's DSPM tenant and ingesting Classifications, Assets, Issues, and Identity Resources/Definitions into Microsoft Sentinel. 
The data connector is built on Microsoft Sentinel's Codeless Connector Framework and uses the Cyera's API to fetch Cyera's [DSPM Telemetry](https://www.cyera.com/) once received can be correlated with security events creating custom columns so that queries don't need to parse it again, thus resulting in better performance.","[{""description"": ""Connect to your Cyera DSPM tenant via Personal Access Tokens"", ""instructions"": [{""parameters"": {""label"": ""Cyera Personal Access Token Client ID"", ""name"": ""clientId"", ""placeholder"": ""client_id"", ""type"": ""text""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""Cyera Personal Access Token Secret Key"", ""name"": ""clientSecret"", ""placeholder"": ""secret_key"", ""type"": ""password""}, ""type"": ""Textbox""}, {""parameters"": {""connectLabel"": ""Connect"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Cyera DSPM Authentication""}]","{""resourceProvider"": [{""permissionsDisplayText"": ""Read and Write permissions are required."", ""provider"": ""Microsoft.OperationalInsights/workspaces"", ""providerDisplayName"": ""Workspace"", ""requiredPermissions"": {""action"": false, ""delete"": true, ""read"": true, ""write"": true}, ""scope"": ""Workspace""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_CCF/CyeraDSPMLogs_ConnectorDefinitionCCF.json","true" +"CyeraIdentities_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyera1658314682323","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","CyeraDSPMCCF","Cyera Inc","Cyera DSPM Microsoft Sentinel Data Connector","The [Cyera DSPM](https://api.cyera.io/) data connector allows you to connect to your Cyera's DSPM tenant and ingesting Classifications, Assets, Issues, and Identity Resources/Definitions into Microsoft Sentinel. 
The data connector is built on Microsoft Sentinel's Codeless Connector Framework and uses the Cyera's API to fetch Cyera's [DSPM Telemetry](https://www.cyera.com/) once received can be correlated with security events creating custom columns so that queries don't need to parse it again, thus resulting in better performance.","[{""description"": ""Connect to your Cyera DSPM tenant via Personal Access Tokens"", ""instructions"": [{""parameters"": {""label"": ""Cyera Personal Access Token Client ID"", ""name"": ""clientId"", ""placeholder"": ""client_id"", ""type"": ""text""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""Cyera Personal Access Token Secret Key"", ""name"": ""clientSecret"", ""placeholder"": ""secret_key"", ""type"": ""password""}, ""type"": ""Textbox""}, {""parameters"": {""connectLabel"": ""Connect"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Cyera DSPM Authentication""}]","{""resourceProvider"": [{""permissionsDisplayText"": ""Read and Write permissions are required."", ""provider"": ""Microsoft.OperationalInsights/workspaces"", ""providerDisplayName"": ""Workspace"", ""requiredPermissions"": {""action"": false, ""delete"": true, ""read"": true, ""write"": true}, ""scope"": ""Workspace""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_CCF/CyeraDSPMLogs_ConnectorDefinitionCCF.json","true" +"CyeraIssues_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyera1658314682323","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","CyeraDSPMCCF","Cyera Inc","Cyera DSPM Microsoft Sentinel Data Connector","The [Cyera DSPM](https://api.cyera.io/) data connector allows you to connect to your Cyera's DSPM tenant and ingesting Classifications, Assets, Issues, and Identity Resources/Definitions into Microsoft Sentinel. 
The data connector is built on Microsoft Sentinel's Codeless Connector Framework and uses the Cyera's API to fetch Cyera's [DSPM Telemetry](https://www.cyera.com/) once received can be correlated with security events creating custom columns so that queries don't need to parse it again, thus resulting in better performance.","[{""description"": ""Connect to your Cyera DSPM tenant via Personal Access Tokens"", ""instructions"": [{""parameters"": {""label"": ""Cyera Personal Access Token Client ID"", ""name"": ""clientId"", ""placeholder"": ""client_id"", ""type"": ""text""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""Cyera Personal Access Token Secret Key"", ""name"": ""clientSecret"", ""placeholder"": ""secret_key"", ""type"": ""password""}, ""type"": ""Textbox""}, {""parameters"": {""connectLabel"": ""Connect"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Cyera DSPM Authentication""}]","{""resourceProvider"": [{""permissionsDisplayText"": ""Read and Write permissions are required."", ""provider"": ""Microsoft.OperationalInsights/workspaces"", ""providerDisplayName"": ""Workspace"", ""requiredPermissions"": {""action"": false, ""delete"": true, ""read"": true, ""write"": true}, ""scope"": ""Workspace""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_CCF/CyeraDSPMLogs_ConnectorDefinitionCCF.json","true" +"CyeraAssets_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyera1658314682323","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","CyeraFunctionsConnector","Cyera Inc","Cyera DSPM Azure Functions Microsoft Sentinel Data Connector","The **Cyera DSPM Azure Function Connector** enables seamless ingestion of Cyera’s **Data Security Posture Management (DSPM)** telemetry — *Assets*, *Identities*, *Issues*, and *Classifications* — into **Microsoft 
Sentinel**.\n\nThis connector uses an **Azure Function App** to call Cyera’s REST API on a schedule, fetch the latest DSPM telemetry, and send it to Microsoft Sentinel through the **Azure Monitor Logs Ingestion API** via a **Data Collection Endpoint (DCE)** and **Data Collection Rule (DCR, kind: Direct)** — no agents required.\n\n**Tables created/used**\n\n| Entity | Table | Purpose |\n|---|---|---|\n| Assets | `CyeraAssets_CL` | Raw asset metadata and data-store context |\n| Identities | `CyeraIdentities_CL` | Identity definitions and sensitivity context |\n| Issues | `CyeraIssues_CL` | Findings and remediation details |\n| Classifications | `CyeraClassifications_CL` | Data class & sensitivity definitions |\n| MS View | `CyeraAssets_MS_CL` | Normalized asset view for dashboards |\n\n> **Note:** This v7 connector supersedes the earlier CCF-based approach and aligns with Microsoft’s recommended Direct ingestion path for Microsoft Sentinel.","[{""title"": ""Note"", ""description"": "">**NOTE:** This connector uses an **Azure Function App** and the **Azure Monitor Logs Ingestion API** (DCE + DCR, kind: Direct). Function runtime and data egress may incur charges. See [Azure Functions pricing](https://azure.microsoft.com/pricing/details/functions/).""}, {""title"": ""Optional Step"", ""description"": "">**(Optional)** Store Cyera API credentials in **Azure Key Vault** and reference them from the Function App. 
See [Key Vault references](https://learn.microsoft.com/azure/app-service/app-service-key-vault-references).""}, {""title"": ""STEP 1 \u2014 Prepare Cyera API Access"", ""description"": ""1) Generate a **Personal Access Token** [Generating Personal Access Token](https://support.cyera.io/hc/en-us/articles/19446274608919-Personal-and-API-Tokens) in your Cyera tenant.\\n2) Note **API Base URL**, **Client ID**, and **Client Secret**.""}, {""title"": ""STEP 2 \u2014 Choose ONE deployment option"", ""description"": ""> Before deploying, have these values handy:"", ""instructions"": [{""parameters"": {""fillWith"": [""CyeraDSPMConnector""], ""label"": ""Cyera Function Connector Name""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""{{workspace-location}}""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""{{workspace-location}}""], ""label"": ""Workspace Location""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""https://api.cyera.io""], ""label"": ""Cyera Base URL""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""CyeraClientID""], ""label"": ""Cyera Personal Access Token Client ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""CyeraSecret""], ""label"": ""Cyera Personal Access Token Secret""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1"", ""description"": ""**Option 1 - Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the Cyera DSPM Functions and all required resources to support the connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/{{deployment-template-uri)\n2. Select the preferred **FunctionName** and **Workspace Name**. \n3. Enter the **Workspace Location**, **Cyera API Base Url**, **Personal Access Token Client ID**, and **Personal Access Token Secret**. 
\n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 \u2014 Manual Deployment"", ""description"": ""Follow the [install pack\u2019s step-by-step guide]({{userguide-url}}.\\n\\n1) Create/update the 5 custom tables, data collection rule with format `sentinel-dce-`, and data collection endpoint with format `sentinel-dcr-` using the scripts in [install-pack-v0_7_0/scripts]({{deployment-script-zip-url}}).\\n2) Deploy the Azure Function from the repo`s Function folder (Timer-trigger; schedule typically 5\u201315 minutes).\\n3) Configure Function App settings:\\n - `CyeraBaseUrl` \u2014 Cyera API Base URL\\n - `CyeraClientId` \u2014 Client ID (PAT)\\n - `CyeraSecret` \u2014 Client Secret (PAT)\\n - `DCR_IMMUTABLE_ID` \u2014 DCR immutable ID\\n - `DCE_ENDPOINT` \u2014 Logs ingestion endpoint URL\\n - `STREAM_ASSETS`=`Custom-CyeraAssets`, `STREAM_IDENTITIES`=`Custom-CyeraIdentities`, `STREAM_ISSUES`=`Custom-CyeraIssues`, `STREAM_CLASSIFICATIONS`=`Custom-CyeraClassifications`\\n4) Save and Start the Function App.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_Functions/FunctionAppDC.json","true" 
+"CyeraAssets_MS_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyera1658314682323","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","CyeraFunctionsConnector","Cyera Inc","Cyera DSPM Azure Functions Microsoft Sentinel Data Connector","The **Cyera DSPM Azure Function Connector** enables seamless ingestion of Cyera’s **Data Security Posture Management (DSPM)** telemetry — *Assets*, *Identities*, *Issues*, and *Classifications* — into **Microsoft Sentinel**.\n\nThis connector uses an **Azure Function App** to call Cyera’s REST API on a schedule, fetch the latest DSPM telemetry, and send it to Microsoft Sentinel through the **Azure Monitor Logs Ingestion API** via a **Data Collection Endpoint (DCE)** and **Data Collection Rule (DCR, kind: Direct)** — no agents required.\n\n**Tables created/used**\n\n| Entity | Table | Purpose |\n|---|---|---|\n| Assets | `CyeraAssets_CL` | Raw asset metadata and data-store context |\n| Identities | `CyeraIdentities_CL` | Identity definitions and sensitivity context |\n| Issues | `CyeraIssues_CL` | Findings and remediation details |\n| Classifications | `CyeraClassifications_CL` | Data class & sensitivity definitions |\n| MS View | `CyeraAssets_MS_CL` | Normalized asset view for dashboards |\n\n> **Note:** This v7 connector supersedes the earlier CCF-based approach and aligns with Microsoft’s recommended Direct ingestion path for Microsoft Sentinel.","[{""title"": ""Note"", ""description"": "">**NOTE:** This connector uses an **Azure Function App** and the **Azure Monitor Logs Ingestion API** (DCE + DCR, kind: Direct). Function runtime and data egress may incur charges. See [Azure Functions pricing](https://azure.microsoft.com/pricing/details/functions/).""}, {""title"": ""Optional Step"", ""description"": "">**(Optional)** Store Cyera API credentials in **Azure Key Vault** and reference them from the Function App. 
See [Key Vault references](https://learn.microsoft.com/azure/app-service/app-service-key-vault-references).""}, {""title"": ""STEP 1 \u2014 Prepare Cyera API Access"", ""description"": ""1) Generate a **Personal Access Token** [Generating Personal Access Token](https://support.cyera.io/hc/en-us/articles/19446274608919-Personal-and-API-Tokens) in your Cyera tenant.\\n2) Note **API Base URL**, **Client ID**, and **Client Secret**.""}, {""title"": ""STEP 2 \u2014 Choose ONE deployment option"", ""description"": ""> Before deploying, have these values handy:"", ""instructions"": [{""parameters"": {""fillWith"": [""CyeraDSPMConnector""], ""label"": ""Cyera Function Connector Name""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""{{workspace-location}}""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""{{workspace-location}}""], ""label"": ""Workspace Location""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""https://api.cyera.io""], ""label"": ""Cyera Base URL""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""CyeraClientID""], ""label"": ""Cyera Personal Access Token Client ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""CyeraSecret""], ""label"": ""Cyera Personal Access Token Secret""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1"", ""description"": ""**Option 1 - Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the Cyera DSPM Functions and all required resources to support the connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/{{deployment-template-uri)\n2. Select the preferred **FunctionName** and **Workspace Name**. \n3. Enter the **Workspace Location**, **Cyera API Base Url**, **Personal Access Token Client ID**, and **Personal Access Token Secret**. 
\n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 \u2014 Manual Deployment"", ""description"": ""Follow the [install pack\u2019s step-by-step guide]({{userguide-url}}.\\n\\n1) Create/update the 5 custom tables, data collection rule with format `sentinel-dce-`, and data collection endpoint with format `sentinel-dcr-` using the scripts in [install-pack-v0_7_0/scripts]({{deployment-script-zip-url}}).\\n2) Deploy the Azure Function from the repo`s Function folder (Timer-trigger; schedule typically 5\u201315 minutes).\\n3) Configure Function App settings:\\n - `CyeraBaseUrl` \u2014 Cyera API Base URL\\n - `CyeraClientId` \u2014 Client ID (PAT)\\n - `CyeraSecret` \u2014 Client Secret (PAT)\\n - `DCR_IMMUTABLE_ID` \u2014 DCR immutable ID\\n - `DCE_ENDPOINT` \u2014 Logs ingestion endpoint URL\\n - `STREAM_ASSETS`=`Custom-CyeraAssets`, `STREAM_IDENTITIES`=`Custom-CyeraIdentities`, `STREAM_ISSUES`=`Custom-CyeraIssues`, `STREAM_CLASSIFICATIONS`=`Custom-CyeraClassifications`\\n4) Save and Start the Function App.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_Functions/FunctionAppDC.json","true" 
+"CyeraClassifications_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyera1658314682323","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","CyeraFunctionsConnector","Cyera Inc","Cyera DSPM Azure Functions Microsoft Sentinel Data Connector","The **Cyera DSPM Azure Function Connector** enables seamless ingestion of Cyera’s **Data Security Posture Management (DSPM)** telemetry — *Assets*, *Identities*, *Issues*, and *Classifications* — into **Microsoft Sentinel**.\n\nThis connector uses an **Azure Function App** to call Cyera’s REST API on a schedule, fetch the latest DSPM telemetry, and send it to Microsoft Sentinel through the **Azure Monitor Logs Ingestion API** via a **Data Collection Endpoint (DCE)** and **Data Collection Rule (DCR, kind: Direct)** — no agents required.\n\n**Tables created/used**\n\n| Entity | Table | Purpose |\n|---|---|---|\n| Assets | `CyeraAssets_CL` | Raw asset metadata and data-store context |\n| Identities | `CyeraIdentities_CL` | Identity definitions and sensitivity context |\n| Issues | `CyeraIssues_CL` | Findings and remediation details |\n| Classifications | `CyeraClassifications_CL` | Data class & sensitivity definitions |\n| MS View | `CyeraAssets_MS_CL` | Normalized asset view for dashboards |\n\n> **Note:** This v7 connector supersedes the earlier CCF-based approach and aligns with Microsoft’s recommended Direct ingestion path for Microsoft Sentinel.","[{""title"": ""Note"", ""description"": "">**NOTE:** This connector uses an **Azure Function App** and the **Azure Monitor Logs Ingestion API** (DCE + DCR, kind: Direct). Function runtime and data egress may incur charges. 
See [Azure Functions pricing](https://azure.microsoft.com/pricing/details/functions/).""}, {""title"": ""Optional Step"", ""description"": "">**(Optional)** Store Cyera API credentials in **Azure Key Vault** and reference them from the Function App. See [Key Vault references](https://learn.microsoft.com/azure/app-service/app-service-key-vault-references).""}, {""title"": ""STEP 1 \u2014 Prepare Cyera API Access"", ""description"": ""1) Generate a **Personal Access Token** [Generating Personal Access Token](https://support.cyera.io/hc/en-us/articles/19446274608919-Personal-and-API-Tokens) in your Cyera tenant.\\n2) Note **API Base URL**, **Client ID**, and **Client Secret**.""}, {""title"": ""STEP 2 \u2014 Choose ONE deployment option"", ""description"": ""> Before deploying, have these values handy:"", ""instructions"": [{""parameters"": {""fillWith"": [""CyeraDSPMConnector""], ""label"": ""Cyera Function Connector Name""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""{{workspace-location}}""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""{{workspace-location}}""], ""label"": ""Workspace Location""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""https://api.cyera.io""], ""label"": ""Cyera Base URL""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""CyeraClientID""], ""label"": ""Cyera Personal Access Token Client ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""CyeraSecret""], ""label"": ""Cyera Personal Access Token Secret""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1"", ""description"": ""**Option 1 - Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the Cyera DSPM Functions and all required resources to support the connector.\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/{{deployment-template-uri)\n2. Select the preferred **FunctionName** and **Workspace Name**. \n3. Enter the **Workspace Location**, **Cyera API Base Url**, **Personal Access Token Client ID**, and **Personal Access Token Secret**. \n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 \u2014 Manual Deployment"", ""description"": ""Follow the [install pack\u2019s step-by-step guide]({{userguide-url}}.\\n\\n1) Create/update the 5 custom tables, data collection rule with format `sentinel-dce-`, and data collection endpoint with format `sentinel-dcr-` using the scripts in [install-pack-v0_7_0/scripts]({{deployment-script-zip-url}}).\\n2) Deploy the Azure Function from the repo`s Function folder (Timer-trigger; schedule typically 5\u201315 minutes).\\n3) Configure Function App settings:\\n - `CyeraBaseUrl` \u2014 Cyera API Base URL\\n - `CyeraClientId` \u2014 Client ID (PAT)\\n - `CyeraSecret` \u2014 Client Secret (PAT)\\n - `DCR_IMMUTABLE_ID` \u2014 DCR immutable ID\\n - `DCE_ENDPOINT` \u2014 Logs ingestion endpoint URL\\n - `STREAM_ASSETS`=`Custom-CyeraAssets`, `STREAM_IDENTITIES`=`Custom-CyeraIdentities`, `STREAM_ISSUES`=`Custom-CyeraIssues`, `STREAM_CLASSIFICATIONS`=`Custom-CyeraClassifications`\\n4) Save and Start the Function App.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": 
""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_Functions/FunctionAppDC.json","true" +"CyeraIdentities_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyera1658314682323","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","CyeraFunctionsConnector","Cyera Inc","Cyera DSPM Azure Functions Microsoft Sentinel Data Connector","The **Cyera DSPM Azure Function Connector** enables seamless ingestion of Cyera’s **Data Security Posture Management (DSPM)** telemetry — *Assets*, *Identities*, *Issues*, and *Classifications* — into **Microsoft Sentinel**.\n\nThis connector uses an **Azure Function App** to call Cyera’s REST API on a schedule, fetch the latest DSPM telemetry, and send it to Microsoft Sentinel through the **Azure Monitor Logs Ingestion API** via a **Data Collection Endpoint (DCE)** and **Data Collection Rule (DCR, kind: Direct)** — no agents required.\n\n**Tables created/used**\n\n| Entity | Table | Purpose |\n|---|---|---|\n| Assets | `CyeraAssets_CL` | Raw asset metadata and data-store context |\n| Identities | `CyeraIdentities_CL` | Identity definitions and sensitivity context |\n| Issues | `CyeraIssues_CL` | Findings and remediation details |\n| Classifications | `CyeraClassifications_CL` | Data class & sensitivity definitions |\n| MS View | `CyeraAssets_MS_CL` | Normalized asset view for dashboards |\n\n> **Note:** This v7 connector supersedes the earlier CCF-based approach and aligns with Microsoft’s recommended Direct ingestion path for Microsoft Sentinel.","[{""title"": ""Note"", ""description"": "">**NOTE:** This connector uses an **Azure Function App** and the **Azure Monitor Logs Ingestion API** (DCE + DCR, kind: Direct). 
Function runtime and data egress may incur charges. See [Azure Functions pricing](https://azure.microsoft.com/pricing/details/functions/).""}, {""title"": ""Optional Step"", ""description"": "">**(Optional)** Store Cyera API credentials in **Azure Key Vault** and reference them from the Function App. See [Key Vault references](https://learn.microsoft.com/azure/app-service/app-service-key-vault-references).""}, {""title"": ""STEP 1 \u2014 Prepare Cyera API Access"", ""description"": ""1) Generate a **Personal Access Token** [Generating Personal Access Token](https://support.cyera.io/hc/en-us/articles/19446274608919-Personal-and-API-Tokens) in your Cyera tenant.\\n2) Note **API Base URL**, **Client ID**, and **Client Secret**.""}, {""title"": ""STEP 2 \u2014 Choose ONE deployment option"", ""description"": ""> Before deploying, have these values handy:"", ""instructions"": [{""parameters"": {""fillWith"": [""CyeraDSPMConnector""], ""label"": ""Cyera Function Connector Name""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""{{workspace-location}}""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""{{workspace-location}}""], ""label"": ""Workspace Location""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""https://api.cyera.io""], ""label"": ""Cyera Base URL""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""CyeraClientID""], ""label"": ""Cyera Personal Access Token Client ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""CyeraSecret""], ""label"": ""Cyera Personal Access Token Secret""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1"", ""description"": ""**Option 1 - Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the Cyera DSPM Functions and all required resources to support the connector.\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/{{deployment-template-uri}})\n2. Select the preferred **FunctionName** and **Workspace Name**. \n3. Enter the **Workspace Location**, **Cyera API Base Url**, **Personal Access Token Client ID**, and **Personal Access Token Secret**. \n>Note: If using Azure Key Vault secrets for any of the values above, use the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 \u2014 Manual Deployment"", ""description"": ""Follow the [install pack\u2019s step-by-step guide]({{userguide-url}}).\\n\\n1) Create/update the 5 custom tables, data collection rule with format `sentinel-dcr-`, and data collection endpoint with format `sentinel-dce-` using the scripts in [install-pack-v0_7_0/scripts]({{deployment-script-zip-url}}).\\n2) Deploy the Azure Function from the repo's Function folder (Timer-trigger; schedule typically 5\u201315 minutes).\\n3) Configure Function App settings:\\n - `CyeraBaseUrl` \u2014 Cyera API Base URL\\n - `CyeraClientId` \u2014 Client ID (PAT)\\n - `CyeraSecret` \u2014 Client Secret (PAT)\\n - `DCR_IMMUTABLE_ID` \u2014 DCR immutable ID\\n - `DCE_ENDPOINT` \u2014 Logs ingestion endpoint URL\\n - `STREAM_ASSETS`=`Custom-CyeraAssets`, `STREAM_IDENTITIES`=`Custom-CyeraIdentities`, `STREAM_ISSUES`=`Custom-CyeraIssues`, `STREAM_CLASSIFICATIONS`=`Custom-CyeraClassifications`\\n4) Save and Start the Function App.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": 
""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_Functions/FunctionAppDC.json","true" +"CyeraIssues_CL","CyeraDSPM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM","cyera1658314682323","azure-sentinel-solution-cyeradspm","2025-10-15","2025-10-29","","Cyera Inc","Partner","https://support.cyera.io","","domains","CyeraFunctionsConnector","Cyera Inc","Cyera DSPM Azure Functions Microsoft Sentinel Data Connector","The **Cyera DSPM Azure Function Connector** enables seamless ingestion of Cyera’s **Data Security Posture Management (DSPM)** telemetry — *Assets*, *Identities*, *Issues*, and *Classifications* — into **Microsoft Sentinel**.\n\nThis connector uses an **Azure Function App** to call Cyera’s REST API on a schedule, fetch the latest DSPM telemetry, and send it to Microsoft Sentinel through the **Azure Monitor Logs Ingestion API** via a **Data Collection Endpoint (DCE)** and **Data Collection Rule (DCR, kind: Direct)** — no agents required.\n\n**Tables created/used**\n\n| Entity | Table | Purpose |\n|---|---|---|\n| Assets | `CyeraAssets_CL` | Raw asset metadata and data-store context |\n| Identities | `CyeraIdentities_CL` | Identity definitions and sensitivity context |\n| Issues | `CyeraIssues_CL` | Findings and remediation details |\n| Classifications | `CyeraClassifications_CL` | Data class & sensitivity definitions |\n| MS View | `CyeraAssets_MS_CL` | Normalized asset view for dashboards |\n\n> **Note:** This v7 connector supersedes the earlier CCF-based approach and aligns with Microsoft’s recommended Direct ingestion path for Microsoft Sentinel.","[{""title"": ""Note"", ""description"": "">**NOTE:** This connector uses an **Azure Function App** and the **Azure Monitor Logs Ingestion API** (DCE + DCR, kind: Direct). Function runtime and data egress may incur charges. 
See [Azure Functions pricing](https://azure.microsoft.com/pricing/details/functions/).""}, {""title"": ""Optional Step"", ""description"": "">**(Optional)** Store Cyera API credentials in **Azure Key Vault** and reference them from the Function App. See [Key Vault references](https://learn.microsoft.com/azure/app-service/app-service-key-vault-references).""}, {""title"": ""STEP 1 \u2014 Prepare Cyera API Access"", ""description"": ""1) Generate a **Personal Access Token** [Generating Personal Access Token](https://support.cyera.io/hc/en-us/articles/19446274608919-Personal-and-API-Tokens) in your Cyera tenant.\\n2) Note **API Base URL**, **Client ID**, and **Client Secret**.""}, {""title"": ""STEP 2 \u2014 Choose ONE deployment option"", ""description"": ""> Before deploying, have these values handy:"", ""instructions"": [{""parameters"": {""fillWith"": [""CyeraDSPMConnector""], ""label"": ""Cyera Function Connector Name""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""{{workspace-location}}""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""{{workspace-location}}""], ""label"": ""Workspace Location""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""https://api.cyera.io""], ""label"": ""Cyera Base URL""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""CyeraClientID""], ""label"": ""Cyera Personal Access Token Client ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""CyeraSecret""], ""label"": ""Cyera Personal Access Token Secret""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1"", ""description"": ""**Option 1 - Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the Cyera DSPM Functions and all required resources to support the connector.\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/{{deployment-template-uri}})\n2. Select the preferred **FunctionName** and **Workspace Name**. \n3. Enter the **Workspace Location**, **Cyera API Base Url**, **Personal Access Token Client ID**, and **Personal Access Token Secret**. \n>Note: If using Azure Key Vault secrets for any of the values above, use the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 \u2014 Manual Deployment"", ""description"": ""Follow the [install pack\u2019s step-by-step guide]({{userguide-url}}).\\n\\n1) Create/update the 5 custom tables, data collection rule with format `sentinel-dcr-`, and data collection endpoint with format `sentinel-dce-` using the scripts in [install-pack-v0_7_0/scripts]({{deployment-script-zip-url}}).\\n2) Deploy the Azure Function from the repo's Function folder (Timer-trigger; schedule typically 5\u201315 minutes).\\n3) Configure Function App settings:\\n - `CyeraBaseUrl` \u2014 Cyera API Base URL\\n - `CyeraClientId` \u2014 Client ID (PAT)\\n - `CyeraSecret` \u2014 Client Secret (PAT)\\n - `DCR_IMMUTABLE_ID` \u2014 DCR immutable ID\\n - `DCE_ENDPOINT` \u2014 Logs ingestion endpoint URL\\n - `STREAM_ASSETS`=`Custom-CyeraAssets`, `STREAM_IDENTITIES`=`Custom-CyeraIdentities`, `STREAM_ISSUES`=`Custom-CyeraIssues`, `STREAM_CLASSIFICATIONS`=`Custom-CyeraClassifications`\\n4) Save and Start the Function App.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": 
""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/CyeraDSPM/Data%20Connectors/CyeraDSPM_Functions/FunctionAppDC.json","true" +"CyfirmaASCertificatesAlerts_CL","Cyfirma Attack Surface","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-attack-surface","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaAttackSurfaceAlertsConnector","Microsoft","CYFIRMA Attack Surface","","[{""title"": ""CYFIRMA Attack Surface"", ""description"": ""Connect to CYFIRMA Attack Surface to ingest alerts into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into custom tables during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. 
False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface/Data%20Connectors/CyfirmaASAlerts_ccp/CyfirmaASAlerts_DataConnectorDefinition.json","true" +"CyfirmaASCloudWeaknessAlerts_CL","Cyfirma Attack Surface","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-attack-surface","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaAttackSurfaceAlertsConnector","Microsoft","CYFIRMA Attack Surface","","[{""title"": ""CYFIRMA Attack Surface"", ""description"": ""Connect to CYFIRMA Attack Surface to ingest alerts into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into custom tables during ingestion. 
This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface/Data%20Connectors/CyfirmaASAlerts_ccp/CyfirmaASAlerts_DataConnectorDefinition.json","true" +"CyfirmaASConfigurationAlerts_CL","Cyfirma Attack Surface","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-attack-surface","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaAttackSurfaceAlertsConnector","Microsoft","CYFIRMA Attack Surface","","[{""title"": ""CYFIRMA Attack Surface"", ""description"": ""Connect to CYFIRMA Attack Surface to ingest alerts into Microsoft Sentinel. 
This connector uses the DeCYFIR/DeTCT API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into custom tables during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface/Data%20Connectors/CyfirmaASAlerts_ccp/CyfirmaASAlerts_DataConnectorDefinition.json","true" +"CyfirmaASDomainIPReputationAlerts_CL","Cyfirma Attack Surface","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-attack-surface","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaAttackSurfaceAlertsConnector","Microsoft","CYFIRMA 
Attack Surface","","[{""title"": ""CYFIRMA Attack Surface"", ""description"": ""Connect to CYFIRMA Attack Surface to ingest alerts into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into custom tables during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. 
False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface/Data%20Connectors/CyfirmaASAlerts_ccp/CyfirmaASAlerts_DataConnectorDefinition.json","true" +"CyfirmaASDomainIPVulnerabilityAlerts_CL","Cyfirma Attack Surface","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-attack-surface","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaAttackSurfaceAlertsConnector","Microsoft","CYFIRMA Attack Surface","","[{""title"": ""CYFIRMA Attack Surface"", ""description"": ""Connect to CYFIRMA Attack Surface to ingest alerts into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into custom tables during ingestion. 
This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface/Data%20Connectors/CyfirmaASAlerts_ccp/CyfirmaASAlerts_DataConnectorDefinition.json","true" +"CyfirmaASOpenPortsAlerts_CL","Cyfirma Attack Surface","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-attack-surface","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaAttackSurfaceAlertsConnector","Microsoft","CYFIRMA Attack Surface","","[{""title"": ""CYFIRMA Attack Surface"", ""description"": ""Connect to CYFIRMA Attack Surface to ingest alerts into Microsoft Sentinel. 
This connector uses the DeCYFIR/DeTCT API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into custom tables during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Attack%20Surface/Data%20Connectors/CyfirmaASAlerts_ccp/CyfirmaASAlerts_DataConnectorDefinition.json","true" +"CyfirmaBIDomainITAssetAlerts_CL","Cyfirma Brand Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-brand-intelligence","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaBrandIntelligenceAlertsDC","Microsoft","CYFIRMA 
Brand Intelligence","","[{""title"": ""CYFIRMA Brand Intelligence"", ""description"": ""Connect to CYFIRMA Brand Intelligence to ingest alerts data into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT Alerts API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into custom tables during ingestion. This enhances performance and efficiency by eliminating the need for query-time parsing."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. 
False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence/Data%20Connectors/CyfirmaBIAlerts_ccp/CyfirmaBIAlerts_DataConnectorDefinition.json","true" +"CyfirmaBIExecutivePeopleAlerts_CL","Cyfirma Brand Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-brand-intelligence","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaBrandIntelligenceAlertsDC","Microsoft","CYFIRMA Brand Intelligence","","[{""title"": ""CYFIRMA Brand Intelligence"", ""description"": ""Connect to CYFIRMA Brand Intelligence to ingest alerts data into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT Alerts API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into custom tables during ingestion. 
This enhances performance and efficiency by eliminating the need for query-time parsing."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence/Data%20Connectors/CyfirmaBIAlerts_ccp/CyfirmaBIAlerts_DataConnectorDefinition.json","true" +"CyfirmaBIMaliciousMobileAppsAlerts_CL","Cyfirma Brand Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-brand-intelligence","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaBrandIntelligenceAlertsDC","Microsoft","CYFIRMA Brand Intelligence","","[{""title"": ""CYFIRMA Brand Intelligence"", ""description"": ""Connect to CYFIRMA Brand Intelligence to ingest alerts data into 
Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT Alerts API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into custom tables during ingestion. This enhances performance and efficiency by eliminating the need for query-time parsing."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence/Data%20Connectors/CyfirmaBIAlerts_ccp/CyfirmaBIAlerts_DataConnectorDefinition.json","true" +"CyfirmaBIProductSolutionAlerts_CL","Cyfirma Brand 
Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-brand-intelligence","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaBrandIntelligenceAlertsDC","Microsoft","CYFIRMA Brand Intelligence","","[{""title"": ""CYFIRMA Brand Intelligence"", ""description"": ""Connect to CYFIRMA Brand Intelligence to ingest alerts data into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT Alerts API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into custom tables during ingestion. This enhances performance and efficiency by eliminating the need for query-time parsing."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. 
False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence/Data%20Connectors/CyfirmaBIAlerts_ccp/CyfirmaBIAlerts_DataConnectorDefinition.json","true" +"CyfirmaBISocialHandlersAlerts_CL","Cyfirma Brand Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-brand-intelligence","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaBrandIntelligenceAlertsDC","Microsoft","CYFIRMA Brand Intelligence","","[{""title"": ""CYFIRMA Brand Intelligence"", ""description"": ""Connect to CYFIRMA Brand Intelligence to ingest alerts data into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT Alerts API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into custom tables during ingestion. 
This enhances performance and efficiency by eliminating the need for query-time parsing."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Brand%20Intelligence/Data%20Connectors/CyfirmaBIAlerts_ccp/CyfirmaBIAlerts_DataConnectorDefinition.json","true" +"CyfirmaCompromisedAccounts_CL","Cyfirma Compromised Accounts","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Compromised%20Accounts","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirmacompromisedaccounts","2025-05-15","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaCompromisedAccountsDataConnector","Microsoft","CYFIRMA Compromised Accounts","The CYFIRMA Compromised Accounts data connector enables seamless log ingestion from the DeCYFIR/DeTCT API into Microsoft Sentinel. 
Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR/DeTCT API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Compromised Accounts"", ""description"": ""The CYFIRMA Compromised Accounts Data Connector enables seamless log ingestion from the DeCYFIR/DeTCT API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR/DeTCT API to retrieve logs. Additionally, it supports DCR-based ingestion time transformations, which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""Setting it to true returns only data added since the last API call, while false returns all available data.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": 
""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Compromised%20Accounts/Data%20Connectors/CyfirmaCompromisedAccounts_ccp/CyfirmaCompAcc_DataConnectorDefinition.json","true" +"CyfirmaCampaigns_CL","Cyfirma Cyber Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Cyber%20Intelligence","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-cyber-intelligence","2025-05-15","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaCyberIntelligenceDC","Microsoft","CYFIRMA Cyber Intelligence","The CYFIRMA Cyber Intelligence data connector enables seamless log ingestion from the DeCYFIR API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Cyber Intelligence"", ""description"": ""This connector provides the Indicators, Threat actors, Malware and Campaigns logs from CYFIRMA Cyber Intelligence. The connector uses the DeCYFIR API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""descriptionMarkdown"": ""This connector provides the Indicators, Threat actors, Malware and Campaigns logs from CYFIRMA Cyber Intelligence. The connector uses the DeCYFIR API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into a custom table during ingestion. 
This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Pull all IoC's Or Tailored IoC's"", ""placeholder"": ""All IoC's or Tailored IoC's"", ""type"": ""text"", ""name"": ""apiAll"", ""defaultValue"": ""false"", ""description"": ""Set to true to pull all IoC's, set to false to pull tailored IoC's""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""defaultValue"": ""false"", ""description"": ""Setting it to true returns only data added since the last API call, while false returns data from the last 24 hours.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Recommended Actions"", ""placeholder"": ""Recommended Action can be any one of:All/Monitor/Block"", ""type"": ""text"", ""name"": ""recommendedActions"", ""defaultValue"": ""All"", ""description"": ""Recommended Action can be any one of:All/Monitor/Block""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Threat Actor Associated"", ""placeholder"": ""Is any Threat Actor Associated with the IoC's"", ""type"": ""text"", ""name"": ""isThreatActorExists"", ""defaultValue"": ""false"", ""description"": ""Is any Threat Actor Associated with the IoC's""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": 
""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Cyber%20Intelligence/Data%20Connectors/CyfirmaCyberIntelligence_ccp/CyfirmaCyberIntel_DataConnectorDefinition.json","true" +"CyfirmaIndicators_CL","Cyfirma Cyber Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Cyber%20Intelligence","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-cyber-intelligence","2025-05-15","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaCyberIntelligenceDC","Microsoft","CYFIRMA Cyber Intelligence","The CYFIRMA Cyber Intelligence data connector enables seamless log ingestion from the DeCYFIR API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Cyber Intelligence"", ""description"": ""This connector provides the Indicators, Threat actors, Malware and Campaigns logs from CYFIRMA Cyber Intelligence. The connector uses the DeCYFIR API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""descriptionMarkdown"": ""This connector provides the Indicators, Threat actors, Malware and Campaigns logs from CYFIRMA Cyber Intelligence. 
The connector uses the DeCYFIR API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Pull all IoC's Or Tailored IoC's"", ""placeholder"": ""All IoC's or Tailored IoC's"", ""type"": ""text"", ""name"": ""apiAll"", ""defaultValue"": ""false"", ""description"": ""Set to true to pull all IoC's, set to false to pull tailored IoC's""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""defaultValue"": ""false"", ""description"": ""Setting it to true returns only data added since the last API call, while false returns data from the last 24 hours.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Recommended Actions"", ""placeholder"": ""Recommended Action can be any one of:All/Monitor/Block"", ""type"": ""text"", ""name"": ""recommendedActions"", ""defaultValue"": ""All"", ""description"": ""Recommended Action can be any one of:All/Monitor/Block""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Threat Actor Associated"", ""placeholder"": ""Is any Threat Actor Associated with the IoC's"", ""type"": ""text"", ""name"": ""isThreatActorExists"", ""defaultValue"": ""false"", ""description"": ""Is any Threat Actor Associated with the IoC's""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": 
{""connectLabel"": ""Connect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Cyber%20Intelligence/Data%20Connectors/CyfirmaCyberIntelligence_ccp/CyfirmaCyberIntel_DataConnectorDefinition.json","true" +"CyfirmaMalware_CL","Cyfirma Cyber Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Cyber%20Intelligence","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-cyber-intelligence","2025-05-15","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaCyberIntelligenceDC","Microsoft","CYFIRMA Cyber Intelligence","The CYFIRMA Cyber Intelligence data connector enables seamless log ingestion from the DeCYFIR API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Cyber Intelligence"", ""description"": ""This connector provides the Indicators, Threat actors, Malware and Campaigns logs from CYFIRMA Cyber Intelligence. The connector uses the DeCYFIR API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into a custom table during ingestion. 
This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""descriptionMarkdown"": ""This connector provides the Indicators, Threat actors, Malware and Campaigns logs from CYFIRMA Cyber Intelligence. The connector uses the DeCYFIR API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Pull all IoC's Or Tailored IoC's"", ""placeholder"": ""All IoC's or Tailored IoC's"", ""type"": ""text"", ""name"": ""apiAll"", ""defaultValue"": ""false"", ""description"": ""Set to true to pull all IoC's, set to false to pull tailored IoC's""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""defaultValue"": ""false"", ""description"": ""Setting it to true returns only data added since the last API call, while false returns data from the last 24 hours.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Recommended Actions"", ""placeholder"": ""Recommended Action can be any one of:All/Monitor/Block"", ""type"": ""text"", ""name"": ""recommendedActions"", ""defaultValue"": ""All"", ""description"": ""Recommended Action can be any one of:All/Monitor/Block""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Threat Actor Associated"", ""placeholder"": ""Is any Threat Actor Associated 
with the IoC's"", ""type"": ""text"", ""name"": ""isThreatActorExists"", ""defaultValue"": ""false"", ""description"": ""Is any Threat Actor Associated with the IoC's""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Cyber%20Intelligence/Data%20Connectors/CyfirmaCyberIntelligence_ccp/CyfirmaCyberIntel_DataConnectorDefinition.json","true" +"CyfirmaThreatActors_CL","Cyfirma Cyber Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Cyber%20Intelligence","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-cyber-intelligence","2025-05-15","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaCyberIntelligenceDC","Microsoft","CYFIRMA Cyber Intelligence","The CYFIRMA Cyber Intelligence data connector enables seamless log ingestion from the DeCYFIR API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Cyber Intelligence"", ""description"": ""This connector provides the Indicators, Threat actors, Malware and Campaigns logs from CYFIRMA Cyber Intelligence. 
The connector uses the DeCYFIR API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""descriptionMarkdown"": ""This connector provides the Indicators, Threat actors, Malware and Campaigns logs from CYFIRMA Cyber Intelligence. The connector uses the DeCYFIR API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Pull all IoC's Or Tailored IoC's"", ""placeholder"": ""All IoC's or Tailored IoC's"", ""type"": ""text"", ""name"": ""apiAll"", ""defaultValue"": ""false"", ""description"": ""Set to true to pull all IoC's, set to false to pull tailored IoC's""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""defaultValue"": ""false"", ""description"": ""Setting it to true returns only data added since the last API call, while false returns data from the last 24 hours.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Recommended Actions"", ""placeholder"": ""Recommended Action can be any one of:All/Monitor/Block"", ""type"": ""text"", ""name"": ""recommendedActions"", ""defaultValue"": ""All"", ""description"": ""Recommended Action can be 
any one of:All/Monitor/Block""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Threat Actor Associated"", ""placeholder"": ""Is any Threat Actor Associated with the IoC's"", ""type"": ""text"", ""name"": ""isThreatActorExists"", ""defaultValue"": ""false"", ""description"": ""Is any Threat Actor Associated with the IoC's""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Cyber%20Intelligence/Data%20Connectors/CyfirmaCyberIntelligence_ccp/CyfirmaCyberIntel_DataConnectorDefinition.json","true" +"CyfirmaDBWMDarkWebAlerts_CL","Cyfirma Digital Risk","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-digital-risk","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaDigitalRiskAlertsConnector","Microsoft","CYFIRMA Digital Risk","The CYFIRMA Digital Risk Alerts data connector enables seamless log ingestion from the DeCYFIR/DeTCT API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. 
This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Digital Risk"", ""description"": ""Connect to CYFIRMA Digital Risk Alerts to ingest logs into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT API to retrieve alerts and supports DCR-based ingestion time transformations for efficient log parsing."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. 
False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk/Data%20Connectors/CyfirmaDigitalRiskAlerts_ccp/CyfirmaDigitalRiskAlerts_DataConnectorDefinition.json","true" +"CyfirmaDBWMPhishingAlerts_CL","Cyfirma Digital Risk","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-digital-risk","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaDigitalRiskAlertsConnector","Microsoft","CYFIRMA Digital Risk","The CYFIRMA Digital Risk Alerts data connector enables seamless log ingestion from the DeCYFIR/DeTCT API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Digital Risk"", ""description"": ""Connect to CYFIRMA Digital Risk Alerts to ingest logs into Microsoft Sentinel. 
This connector uses the DeCYFIR/DeTCT API to retrieve alerts and supports DCR-based ingestion time transformations for efficient log parsing."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk/Data%20Connectors/CyfirmaDigitalRiskAlerts_ccp/CyfirmaDigitalRiskAlerts_DataConnectorDefinition.json","true" +"CyfirmaDBWMRansomwareAlerts_CL","Cyfirma Digital Risk","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-digital-risk","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaDigitalRiskAlertsConnector","Microsoft","CYFIRMA Digital Risk","The CYFIRMA Digital Risk Alerts data connector enables seamless log ingestion from the DeCYFIR/DeTCT API 
into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Digital Risk"", ""description"": ""Connect to CYFIRMA Digital Risk Alerts to ingest logs into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT API to retrieve alerts and supports DCR-based ingestion time transformations for efficient log parsing."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. 
False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk/Data%20Connectors/CyfirmaDigitalRiskAlerts_ccp/CyfirmaDigitalRiskAlerts_DataConnectorDefinition.json","true" +"CyfirmaSPEConfidentialFilesAlerts_CL","Cyfirma Digital Risk","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-digital-risk","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaDigitalRiskAlertsConnector","Microsoft","CYFIRMA Digital Risk","The CYFIRMA Digital Risk Alerts data connector enables seamless log ingestion from the DeCYFIR/DeTCT API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Digital Risk"", ""description"": ""Connect to CYFIRMA Digital Risk Alerts to ingest logs into Microsoft Sentinel. 
This connector uses the DeCYFIR/DeTCT API to retrieve alerts and supports DCR-based ingestion time transformations for efficient log parsing."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk/Data%20Connectors/CyfirmaDigitalRiskAlerts_ccp/CyfirmaDigitalRiskAlerts_DataConnectorDefinition.json","true" +"CyfirmaSPEPIIAndCIIAlerts_CL","Cyfirma Digital Risk","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-digital-risk","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaDigitalRiskAlertsConnector","Microsoft","CYFIRMA Digital Risk","The CYFIRMA Digital Risk Alerts data connector enables seamless log ingestion from the DeCYFIR/DeTCT API 
into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Digital Risk"", ""description"": ""Connect to CYFIRMA Digital Risk Alerts to ingest logs into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT API to retrieve alerts and supports DCR-based ingestion time transformations for efficient log parsing."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. 
False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk/Data%20Connectors/CyfirmaDigitalRiskAlerts_ccp/CyfirmaDigitalRiskAlerts_DataConnectorDefinition.json","true" +"CyfirmaSPESocialThreatAlerts_CL","Cyfirma Digital Risk","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-digital-risk","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaDigitalRiskAlertsConnector","Microsoft","CYFIRMA Digital Risk","The CYFIRMA Digital Risk Alerts data connector enables seamless log ingestion from the DeCYFIR/DeTCT API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Digital Risk"", ""description"": ""Connect to CYFIRMA Digital Risk Alerts to ingest logs into Microsoft Sentinel. 
This connector uses the DeCYFIR/DeTCT API to retrieve alerts and supports DCR-based ingestion time transformations for efficient log parsing."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk/Data%20Connectors/CyfirmaDigitalRiskAlerts_ccp/CyfirmaDigitalRiskAlerts_DataConnectorDefinition.json","true" +"CyfirmaSPESourceCodeAlerts_CL","Cyfirma Digital Risk","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-digital-risk","2025-03-27","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaDigitalRiskAlertsConnector","Microsoft","CYFIRMA Digital Risk","The CYFIRMA Digital Risk Alerts data connector enables seamless log ingestion from the DeCYFIR/DeTCT API 
into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the DeCYFIR Alerts API to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Digital Risk"", ""description"": ""Connect to CYFIRMA Digital Risk Alerts to ingest logs into Microsoft Sentinel. This connector uses the DeCYFIR/DeTCT API to retrieve alerts and supports DCR-based ingestion time transformations for efficient log parsing."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""description"": ""True: First call returns last 30 days, then only incremental data. 
False: Returns all data on every call.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Digital%20Risk/Data%20Connectors/CyfirmaDigitalRiskAlerts_ccp/CyfirmaDigitalRiskAlerts_DataConnectorDefinition.json","true" +"CyfirmaVulnerabilities_CL","Cyfirma Vulnerabilities Intel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Vulnerabilities%20Intel","cyfirmaholdingspteltd1742879329545","azure-sentinel-solution-cyfirma-vulnerabilities","2025-05-15","","","CYFIRMA","Partner","https://www.cyfirma.com/contact-us/","","domains","CyfirmaVulnerabilitiesIntelDC","Microsoft","CYFIRMA Vulnerabilities Intelligence","The CYFIRMA Vulnerabilities Intelligence data connector enables seamless log ingestion from the DeCYFIR API into Microsoft Sentinel. Built on the Microsoft Sentinel Codeless Connector Platform, it leverages the CYFIRMA API's to retrieve logs. Additionally, it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview), which parse security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency.","[{""title"": ""CYFIRMA Vulnerabilities Intelligence"", ""description"": ""This connector provides the Vulnerabilities logs from CYFIRMA Vulnerabilities Intelligence. The connector uses the DeCYFIR API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into a custom table during ingestion. 
This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""descriptionMarkdown"": ""This connector provides the Vulnerabilities logs from CYFIRMA Vulnerabilities Intelligence. The connector uses the DeCYFIR API to retrieve logs and supports DCR-based ingestion time transformations, parsing security data into a custom table during ingestion. This eliminates the need for query-time parsing, enhancing performance and efficiency."", ""estimatedTime"": ""5 minutes"", ""icon"": ""https://www.microsoft.com/favicon.ico"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API URL"", ""placeholder"": ""https://decyfir.cyfirma.com"", ""type"": ""text"", ""name"": ""cyfirmaAPIURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""CYFIRMA API Key"", ""placeholder"": ""CYFIRMA API Key"", ""type"": ""password"", ""name"": ""cyfirmaAPIKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Delta"", ""placeholder"": ""API Delta"", ""type"": ""text"", ""name"": ""apiDelta"", ""defaultValue"": ""false"", ""description"": ""API Delta: If true (default), returns data since the last call; if false or unspecified, returns data from the last 24 hours.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Vendor-Associated Vulnerabilities"", ""placeholder"": """", ""type"": ""text"", ""name"": ""isVendor"", ""defaultValue"": ""false"", ""description"": ""The value for Vendor-Associated Vulnerabilities can be either true or false.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Product-Associated Vulnerabilities"", ""placeholder"": """", ""type"": ""text"", ""name"": ""isProduct"", ""defaultValue"": ""false"", ""description"": ""The value for Product-Associated Vulnerabilities can be either true or false.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Product with Version-Associated Vulnerabilities"", ""placeholder"": """", ""type"": ""text"", ""name"": ""isVersion"", 
""defaultValue"": ""false"", ""description"": ""The value for Version-Associated Vulnerabilities can be either true or false.""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyfirma%20Vulnerabilities%20Intel/Data%20Connectors/CyfirmaVulnerabilitiesIntel_ccp/CyfirmaVulnerabilities_DataConnectorDefinition.json","true" +"CynerioEvent_CL","Cynerio","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cynerio","cynerio1681887657820","cynerio-medical-device-security-sentinel-connector","2023-03-29","2023-03-29","","Cynerio","Partner","https://cynerio.com","","domains","CynerioSecurityEvents","Cynerio","Cynerio Security Events","The [Cynerio](https://www.cynerio.com/) connector allows you to easily connect your Cynerio Security Events with Microsoft Sentinel, to view IDS Events. This gives you more insight into your organization network security posture and improves your security operation capabilities. ","[{""title"": ""Configure and connect Cynerio"", ""description"": ""Cynerio can integrate with and export events directly to Microsoft Sentinel via Azure Server. Follow these steps to establish integration:\n\n1. In the Cynerio console, go to Settings > Integrations tab (default), and click on the **+Add Integration** button at the top right.\n\n2. Scroll down to the **SIEM** section.\n\n3. On the Microsoft Sentinel card, click the Connect button.\n\n4. The Integration Details window opens. 
Use the parameters below to fill out the form and set up the connection."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cynerio/Data%20Connectors/Cynerio_Connector.json","true" +"","Cyware","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Cyware","cywarelabsinc1709256751930","microsoft-sentinel-solution-cyware","2024-03-18","2024-03-18","","Cyware","Partner","","","domains","","","","","","","","false" +"","DEV-0537DetectionandHunting","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/DEV-0537DetectionandHunting","azuresentinel","azure-sentinel-solution-DEV-0537DetectionandHunting","2022-04-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","DNS Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/DNS%20Essentials","azuresentinel","azure-sentinel-solution-dns-domain","2023-01-14","","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"darktrace_model_alerts_CL","Darktrace","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Darktrace","darktrace1655286944672","darktrace_for_sentinel","2022-05-02","","","Darktrace","Partner","https://www.darktrace.com/en/contact/","","domains","DarktraceRESTConnector","Darktrace","Darktrace Connector for Microsoft Sentinel REST API","The Darktrace REST API connector pushes real-time events from Darktrace to Microsoft Sentinel and is designed to be used with the Darktrace Solution for Sentinel. The connector writes logs to a custom log table titled ""darktrace_model_alerts_CL""; Model Breaches, AI Analyst Incidents, System Alerts and Email Alerts can be ingested - additional filters can be set up on the Darktrace System Configuration page. Data is pushed to Sentinel from Darktrace masters.","[{""title"": """", ""description"": ""1. Detailed setup instructions can be found on the Darktrace Customer Portal: https://customerportal.darktrace.com/product-guides/main/microsoft-sentinel-introduction\n 2. Take note of the Workspace ID and the Primary key. You will need to enter these details on your Darktrace System Configuration page.\n "", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Darktrace Configuration"", ""description"": ""1. Perform the following steps on the Darktrace System Configuration page:\n 2. Navigate to the System Configuration Page (Main Menu > Admin > System Config)\n 3. Go into Modules configuration and click on the \""Microsoft Sentinel\"" configuration card\n 4. Select \""HTTPS (JSON)\"" and hit \""New\""\n 5. Fill in the required details and select appropriate filters\n 6. 
Click \""Verify Alert Settings\"" to attempt authentication and send out a test alert\n 7. Run a \""Look for Test Alerts\"" sample query to validate that the test alert has been received"", ""instructions"": """"}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Darktrace Prerequisites"", ""description"": ""To use this Data Connector a Darktrace master running v5.2+ is required.\n Data is sent to the [Azure Monitor HTTP Data Collector API](https://docs.microsoft.com/azure/azure-monitor/logs/data-collector-api) over HTTPs from Darktrace masters, therefore outbound connectivity from the Darktrace master to Microsoft Sentinel REST API is required.""}, {""name"": ""Filter Darktrace Data"", ""description"": ""During configuration it is possible to set up additional filtering on the Darktrace System Configuration page to constrain the amount or types of data sent.""}, {""name"": ""Try the Darktrace Sentinel Solution"", ""description"": ""You can get the most out of this connector by installing the Darktrace Solution for Microsoft Sentinel. 
This will provide workbooks to visualise alert data and analytics rules to automatically create alerts and incidents from Darktrace Model Breaches and AI Analyst incidents.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Darktrace/Data%20Connectors/DarktraceConnectorRESTAPI.json","true" +"ThreatIntelligenceIndicator","Datalake2Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Datalake2Sentinel","cert_orange_cyberdefense","microsoft-sentinel-solution-datalake2sentinel","2024-01-15","2024-01-15","","Orange Cyberdefense","Partner","https://www.orangecyberdefense.com/global/contact","","domains,verticals","Datalake2SentinelConnector","Orange Cyberdefense","Datalake2Sentinel","This solution installs the Datalake2Sentinel connector which is built using the Codeless Connector Platform and allows you to automatically ingest threat intelligence indicators from **Datalake Orange Cyberdefense's CTI platform** into Microsoft Sentinel via the Upload Indicators REST API. After installing the solution, configure and enable this data connector by following guidance in Manage solution view.","[{""title"": ""Installation and setup instructions"", ""description"": ""Use the documentation from this Github repository to install and configure the Datalake to Microsoft Sentinel connector. 
\n\nhttps://github.com/cert-orangecyberdefense/datalake2sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Datalake2Sentinel/Data%20Connectors/Datalake2SentinelConnector.json","true" +"DataminrPulse_Alerts_CL","Dataminr Pulse","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dataminr%20Pulse","dataminrinc1648845584891","dataminr_sentinel","2023-04-12","2023-04-12","","Dataminr Support","Partner","https://www.dataminr.com/dataminr-support#support","","domains","DataminrPulseAlerts","Dataminr","Dataminr Pulse Alerts Data Connector","Dataminr Pulse Alerts Data Connector brings our AI-powered real-time intelligence into Microsoft Sentinel for faster threat detection and response.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the DataminrPulse in which logs are pushed via Dataminr RTAP and it will ingest logs into Microsoft Sentinel. Furthermore, the connector will fetch the ingested data from the custom logs table and create Threat Intelligence Indicators into Microsoft Sentinel Threat Intelligence. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1- Credentials for the Dataminr Pulse Client ID and Client Secret**\n\n * Obtain Dataminr Pulse user ID/password and API client ID/secret from your Dataminr Customer Success Manager (CSM).""}, {""title"": """", ""description"": ""**STEP 2- Configure Watchlists in Dataminr Pulse portal.**\n\n Follow the steps in this section to configure watchlists in portal:\n\n 1. **Login** to the Dataminr Pulse [website](https://app.dataminr.com).\n\n 2. Click on the settings gear icon, and select **Manage Lists**.\n\n 3. Select the type of Watchlist you want to create (Cyber, Topic, Company, etc.) and click the **New List** button.\n\n 4. Provide a **name** for your new Watchlist, and select a highlight color for it, or keep the default color.\n\n 5. When you are done configuring the Watchlist, click **Save** to save it.""}, {""title"": """", ""description"": ""**STEP 3 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of DataminrPulse Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 4 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of DataminrPulse Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of DataminrPulse Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Dataminr Pulse Microsoft Sentinel data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DataminrPulse connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-DataminrPulseAlerts-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-DataminrPulseAlerts-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\n\t a. **Function Name** \n\n\t b. **Location**: The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t c. **Workspace**: Enter Workspace ID of log analytics Workspace ID \n\n\t d. **Workspace Key**: Enter Primary Key of log analytics Workspace \n\n\t e. **DataminrBaseURL**: Enter Base URL starting with \""https://\"" followed by hostname (Example: https://gateway.dataminr.com/) \n\n\t f. **ClientId**: Enter your Dataminr account Client ID \n\n\t g. **ClientSecret**: Enter your Dataminr account Client Secret \n\n\t h. 
**AzureEntraObjectID**: Enter Object id of your Microsoft Entra App \n\n\t i. **AlertsTableName**: Enter name of the table used to store Dataminr Alerts logs. Default is 'DataminrPulse_Alerts' \n\n\t j. **AzureClientId**: Enter Azure Client ID that you have created during app registration \n\n\t k. **AzureClientSecret**: Enter Azure Client Secret that you have created during creating the client secret \n\n\t l. **AzureTenantId**: Enter Azure Tenant ID of your Azure Active Directory \n\n\t m. **AzureResourceGroupName**: Enter Azure Resource Group Name in which you want deploy the data connector \n\n\t n. **AzureWorkspaceName**: Enter Microsoft Sentinel Workspace Name of Log Analytics workspace \n\n\t o. **AzureSubscriptionId**: Enter Azure Subscription Id which is present in the subscription tab in Microsoft Sentinel \n\n\t p. **LogLevel**: Add log level or log severity value. Default is 'INFO' \n\n\t q. **Schedule**: Enter a valid Quartz Cron-Expression (Example: 0 0 0 * * *) \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Dataminr Pulse Microsoft Sentinel data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": ""1) Deploy a Function App"", ""description"": ""> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-DataminrPulseAlerts-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. 
Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. DmPulseXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": ""2) Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\n\t a. **Function Name** \n\n\t b. **Location**: The location in which the data collection rules and data collection endpoints should be deployed. \n\n\t c. **Workspace**: Enter Workspace ID of log analytics Workspace ID \n\n\t d. **Workspace Key**: Enter Primary Key of log analytics Workspace \n\n\t e. 
**DataminrBaseURL**: Enter Base URL starting with \""https://\"" followed by hostname (Example: https://gateway.dataminr.com/) \n\n\t f. **ClientId**: Enter your Dataminr account Client ID \n\n\t g. **ClientSecret**: Enter your Dataminr account Client Secret \n\n\t h. **AzureEntraObjectID**: Enter Object id of your Microsoft Entra App \n\n\t i. **AlertsTableName**: Enter name of the table used to store Dataminr Alerts logs. Default is 'DataminrPulse_Alerts' \n\n\t j. **AzureClientId**: Enter Azure Client ID that you have created during app registration \n\n\t k. **AzureClientSecret**: Enter Azure Client Secret that you have created during creating the client secret \n\n\t l. **AzureTenantId**: Enter Azure Tenant ID of your Azure Active Directory \n\n\t m. **AzureResourceGroupName**: Enter Azure Resource Group Name in which you want deploy the data connector \n\n\t n. **AzureWorkspaceName**: Enter Microsoft Sentinel Workspace Name of Log Analytics workspace \n\n\t o. **AzureSubscriptionId**: Enter Azure Subscription Id which is present in the subscription tab in Microsoft Sentinel \n\n\t p. **LogLevel**: Add log level or log severity value. Default is 'INFO' \n\n\t q. **Schedule**: Enter a valid Quartz Cron-Expression (Example: 0 0 0 * * *) \n\n\t r. **logAnalyticsUri** (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}, {""title"": """", ""description"": ""**STEP 7 - Post Deployment steps**\n\n""}, {""title"": ""1) Get the Function app endpoint"", ""description"": ""1. Go to Azure function Overview page and Click on **\""Functions\""** in the left blade.\n2. Click on the function called **\""DataminrPulseAlertsHttpStarter\""**.\n3. 
Go to **\""GetFunctionurl\""** and copy the function url.\n4. Replace **{functionname}** with **\""DataminrPulseAlertsSentinelOrchestrator\""** in copied function url.""}, {""title"": ""2) To add integration settings in Dataminr RTAP using the function URL"", ""description"": ""1. Open any API request tool like Postman.\n2. Click on '+' to create a new request.\n3. Select HTTP request method as **'POST'**.\n4. Enter the url prepapred in **point 1)**, in the request URL part.\n5. In Body, select raw JSON and provide request body as below(case-sensitive): \n\t\t{ \n\t\t \""integration-settings\"": \""ADD\"", \n\t\t \""url\"": \""`(URL part from copied Function-url)`\"", \n\t\t \""token\"": \""`(value of code parameter from copied Function-url)`\"" \n\t\t}\n6. After providing all required details, click **Send**.\n7. You will receive an integration setting ID in the HTTP response with a status code of 200.\n8. Save **Integration ID** for future reference.""}, {""title"": """", ""description"": ""*Now we are done with the adding integration settings for Dataminr RTAP. Once the Dataminr RTAP send an alert data, Function app is triggered and you should be able to see the Alerts data from the Dataminr Pulse into LogAnalytics workspace table called \""DataminrPulse_Alerts_CL\"".*\n\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Required Dataminr Credentials/permissions"", ""description"": ""\n\na. Users must have a valid Dataminr Pulse API **client ID** and **secret** to use this data connector.\n\n b. One or more Dataminr Pulse Watchlists must be configured in the Dataminr Pulse website.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dataminr%20Pulse/Data%20Connectors/DataminrPulseAlerts/DataminrPulseAlerts_FunctionApp.json","true" +"CommonSecurityLog","Delinea Secret Server","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Delinea%20Secret%20Server","delineainc1653506022260","delinea_secret_server_mss","2022-05-06","","","Delinea","Partner","https://delinea.com/support/","","domains","DelineaSecretServerAma","Delinea, Inc","[Deprecated] Delinea Secret Server via AMA","Common Event Format (CEF) from Delinea Secret Server ","[{""title"": """", ""description"": """", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. 
Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade.\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. If there is no existing DCR configured to collect the required facility of logs, create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine""}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python --version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Delinea%20Secret%20Server/Data%20Connectors/template_DelineaSecretServerAMA.json","true" +"CommonSecurityLog","Delinea Secret Server","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Delinea%20Secret%20Server","delineainc1653506022260","delinea_secret_server_mss","2022-05-06","","","Delinea","Partner","https://delinea.com/support/","","domains","DelineaSecretServer_CEF","Delinea, Inc","[Deprecated] Delinea Secret Server via Legacy Agent","Common Event Format (CEF) from Delinea Secret Server ","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. 
Make sure that you have Python on your machine using the following command: python --version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python --version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Delinea Secret Server"", ""description"": ""must be configured to export logs via Syslog \n\n [Learn more about configure Secret Server](https://thy.center/ss/link/syslog)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Delinea%20Secret%20Server/Data%20Connectors/DelineaSecretServer_CEF.json","true" +"","Dev 0270 Detection and Hunting","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dev%200270%20Detection%20and%20Hunting","azuresentinel","azure-sentinel-solution-dev0270detectionandhunting","2022-11-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"Syslog","Digital Guardian Data Loss Prevention","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Digital%20Guardian%20Data%20Loss%20Prevention","azuresentinel","azure-sentinel-solution-digitalguardiandlp","2021-07-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","DigitalGuardianDLP","Digital Guardian","[Deprecated] Digital 
Guardian Data Loss Prevention","[Digital Guardian Data Loss Prevention (DLP)](https://digitalguardian.com/platform-overview) data connector provides the capability to ingest Digital Guardian DLP logs into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**DigitalGuardianDLPEvent**](https://aka.ms/sentinel-DigitalGuardianDLP-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. Configure Digital Guardian to forward logs via Syslog to remote server where you will install the agent."", ""description"": ""Follow these steps to configure Digital Guardian to forward logs via Syslog:\n\n1.1. Log in to the Digital Guardian Management Console.\n\n1.2. Select **Workspace** > **Data Export** > **Create Export**.\n\n1.3. From the **Data Sources** list, select **Alerts** or **Events** as the data source.\n\n1.4. From the **Export type** list, select **Syslog**.\n\n1.5. From the **Type list**, select **UDP** or **TCP** as the transport protocol.\n\n1.6. In the **Server** field, type the IP address of your Remote Syslog server.\n\n1.7. In the **Port** field, type 514 (or other port if your Syslog server was configured to use non-default port).\n\n1.8. From the **Severity Level** list, select a severity level.\n\n1.9. Select the **Is Active** check box.\n\n1.9. Click **Next**.\n\n1.10. From the list of available fields, add Alert or Event fields for your data export.\n\n1.11. Select a Criteria for the fields in your data export and click **Next**.\n\n1.12. Select a group for the criteria and click **Next**.\n\n1.13. Click **Test Query**.\n\n1.14. Click **Next**.\n\n1.15. Save the data export.""}, {""title"": ""2. 
Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server to which the logs will be forwarded.\n\n> Logs on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""3. 
Check logs in Microsoft Sentinel"", ""description"": ""Open Log Analytics to check if the logs are received using the Syslog schema.\n\n>**NOTE:** It may take up to 15 minutes before new logs will appear in Syslog table."", ""instructions"": []}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Digital%20Guardian%20Data%20Loss%20Prevention/Data%20Connectors/Connector_DigitalGuardian_Syslog.json","true" +"DigitalShadows_CL","Digital Shadows","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Digital%20Shadows","digitalshadows1662022995707","digitalshadows_searchlight_for_sentinel","","","","Digital Shadows","Partner","https://www.digitalshadows.com/","","domains","DigitalShadowsSearchlightAzureFunctions","Digital Shadows","Digital Shadows Searchlight","The Digital Shadows data connector provides ingestion of the incidents and alerts from Digital Shadows Searchlight into the Microsoft Sentinel using the REST API. 
The connector will provide the incidents and alerts information such that it helps to examine, diagnose and analyse the potential security risks and threats.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a 'Digital Shadows Searchlight' to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the 'Digital Shadows Searchlight' API**\n\nThe provider should provide or link to detailed steps to configure the 'Digital Shadows Searchlight' API endpoint so that the Azure Function can authenticate to it successfully, get its authorization key or token, and pull the appliance's logs into Microsoft Sentinel.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the 'Digital Shadows Searchlight' connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the 'Digital Shadows Searchlight' API authorization key(s) or Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""**Option 1 - Azure 
Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the 'Digital Shadows Searchlight' connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-Digitalshadows-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password**, 'and/or Other required fields'. \n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": """", ""description"": ""**Option 2 - Manual Deployment of Azure Functions**\n\n Use the following step-by-step instructions to deploy the 'Digital Shadows Searchlight' connector manually with Azure Functions.""}, {""title"": ""1. Create a Function App"", ""description"": ""1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, ensure Runtime stack is set to **python 3.11**. \n4. In the **Hosting** tab, ensure **Plan type** is set to **'Consumption (Serverless)'**.\n5. Select a Storage account\n6. 'Add other required configurations'. \n7. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""2. Import Function App Code (Zip deployment)"", ""description"": ""1. Install Azure CLI\n2. From terminal type **az functionapp deployment source config-zip -g -n --src ** and hit enter. 
Set the `ResourceGroup` value to: your resource group name. Set the `FunctionApp` value to: your newly created function app name. Set the `Zip File` value to: `digitalshadowsConnector.zip`(path to your zip file). Note:- Download the zip file from the link - [Function App Code](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Digital%20Shadows/Data%20Connectors/Digital%20Shadows/digitalshadowsConnector.zip)""}, {""title"": ""3. Configure the Function App"", ""description"": ""1. In the Function App screen, click the Function App name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following 'x (number of)' application settings individually, under Name, with their respective string values (case-sensitive) under Value: \n\t\tDigitalShadowsAccountID\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tDigitalShadowsKey\n\t\tDigitalShadowsSecret\n\t\tHistoricalDays\n\t\tDigitalShadowsURL\n\t\tClassificationFilterOperation\n\t\tHighVariabilityClassifications\n\t\tFUNCTION_NAME\n\t\tlogAnalyticsUri (optional)\n(add any other settings required by the Function App)\nSet the `DigitalShadowsURL` value to: `https://api.searchlight.app/v1`\nSet the `HighVariabilityClassifications` value to: `exposed-credential,marked-document`\nSet the `ClassificationFilterOperation` value to: `exclude` for exclude function app or `include` for include function app \n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Azure Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. 
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: https://.ods.opinsights.azure.us. \n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Digital Shadows account ID, secret and key** is required. 
See the documentation to learn more about API on the `https://portal-digitalshadows.com/learn/searchlight-api/overview/description`.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Digital%20Shadows/Data%20Connectors/Digital%20Shadows/DigitalShadowsSearchlight_API_functionApp.json","true" +"","DomainTools","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/DomainTools","domaintoolsllc1647901527537","domaintools-iris-investigate","2022-10-20","","","DomainTools","Partner","https://www.domaintools.com/support/","","domains","","","","","","","","false" +"DoppelTable_CL","Doppel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Doppel","doppel","azure-sentinel-solution-doppel","2024-11-20","","","Doppel","Partner","https://www.doppel.com/request-a-demo","","domains","Doppel_DataConnector","Doppel","Doppel Data Connector","The data connector is built on Microsoft Sentinel for Doppel events and alerts and supports DCR-based [ingestion time transformations](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/ingestion-time-transformations) that parses the received security event data into a custom columns so that queries don't need to parse it again, thus resulting in better performance.","[{""title"": ""Configure Doppel Webhook"", ""description"": ""Configure the Webhook in Doppel and Endpoint with permissions in Microsoft Sentinel to send data."", ""instructions"": [{""type"": ""InstructionStepsGroup"", ""parameters"": {""enable"": true, ""userRequestPlaceHolder"": """", ""instructionSteps"": [{""title"": ""Register the Application in Microsoft Entra ID"", ""description"": ""1. **Open the [Microsoft Entra ID page](https://entra.microsoft.com/)**:\n - Click the provided link to open the **Microsoft Entra ID** registration page in a new tab.\n - Ensure you are logged in with an account that has **Admin level** permissions.\n\n2. 
**Create a New Application**:\n - In the **Microsoft Entra ID portal**, select **App registrations** mentioned on the left-hand side tab.\n - Click on **+ New registration**.\n - Fill out the following fields:\n - **Name**: Enter a name for the app (e.g., \u201cDoppel App\u201d).\n - **Supported account types**: Choose **Accounts in this organizational directory only** (Default Directory only - Single tenant).\n - **Redirect URI**: Leave this blank unless required otherwise.\n - Click **Register** to create the application.\n\n3. **Copy Application and Tenant IDs**:\n - Once the app is registered, note the **Application (client) ID** and **Directory (tenant) ID** from the **Overview** page. You\u2019ll need these for the integration.\n\n4. **Create a Client Secret**:\n - In the **Certificates & secrets** section, click **+ New client secret**.\n - Add a description (e.g., 'Doppel Secret') and set an expiration (e.g., 1 year).\n - Click **Add**.\n - **Copy the client secret value immediately**, as it will not be shown again.""}, {""title"": ""Assign the \""Monitoring Metrics Publisher\"" Role to the App"", ""description"": ""1. **Open the Resource Group in Azure Portal**:\n - Navigate to the **Resource Group** that contains the **Log Analytics Workspace** and **Data Collection Rules (DCRs)** where you want the app to push data.\n\n2. **Assign the Role**:\n - In the **Resource Group** menu, click on **Access control (IAM)** mentioned on the left-hand side tab ..\n - Click on **+ Add** and select **Add role assignment**.\n - In the **Role** dropdown, search for and select the **Monitoring Metrics Publisher** role.\n - Under **Assign access to**, choose **Azure AD user, group, or service principal**.\n - In the **Select** field, search for your registered app by **name** or **client ID**.\n - Click **Save** to assign the role to the application.""}, {""title"": ""Deploy the ARM Template"", ""description"": ""1. 
**Retrieve the Workspace ID**:\n - After assigning the role, you will need the **Workspace ID**.\n - Navigate to the **Log Analytics Workspace** within the **Resource Group**.\n - In the **Overview** section, locate the **Workspace ID** field under **Workspace details**.\n - **Copy the Workspace ID** and keep it handy for the next steps.\n\n2. **Click the Deploy to Azure Button**:\n - [![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fmetron-labs%2FAzure-Sentinel%2Frefs%2Fheads%2FDoppelSolution%2FSolutions%2FDoppel%2FData%2520Connectors%2FDeployToAzure.json).\n - This will take you directly to the Azure portal to start the deployment.\n\n3. **Review and Customize Parameters**:\n - On the custom deployment page, ensure you\u2019re deploying to the correct **subscription** and **resource group**.\n - Fill in the parameters like **workspace name**, **workspace ID**, and **workspace location**.\n\n4. **Click Review + Create** and then **Create** to deploy the resources.""}, {""title"": ""Verify DCE, DCR, and Log Analytics Table Setup"", ""description"": ""1. **Check the Data Collection Endpoint (DCE)**:\n - After deploying, go to **Azure Portal > Data Collection Endpoints**.\n - Verify that the **DoppelDCE** endpoint has been created successfully.\n - **Copy the DCE Logs Ingestion URI**, as you\u2019ll need this for generating the webhook URL.\n\n2. **Confirm Data Collection Rule (DCR) Setup**:\n - Go to **Azure Portal > Data Collection Rules**.\n - Ensure the **DoppelDCR** rule is present.\n - **Copy the Immutable ID** of the DCR from the Overview page, as you\u2019ll need it for the webhook URL.\n\n3. 
**Validate Log Analytics Table**:\n - Navigate to your **Log Analytics Workspace** (linked to Microsoft Sentinel).\n - Under the **Tables** section, verify that the **DoppelTable_CL** table has been created successfully and is ready to receive data.""}, {""title"": ""Integrate Doppel Alerts with Microsoft Sentinel"", ""description"": ""1. **Gather Necessary Information**:\n - Collect the following details required for integration:\n - **Data Collection Endpoint ID (DCE-ID)**\n - **Data Collection Rule ID (DCR-ID)**\n - **Microsoft Entra Credentials**: Tenant ID, Client ID, and Client Secret.\n\n2. **Coordinate with Doppel Support**:\n - Share the collected DCE-ID, DCR-ID, and Microsoft Entra credentials with Doppel support.\n - Request assistance to configure these details in the Doppel tenant to enable webhook setup.\n\n3. **Webhook Setup by Doppel**:\n - Doppel will use the provided Resource IDs and credentials to configure a webhook.\n - This webhook will facilitate the forwarding of alerts from Doppel to Microsoft Sentinel.\n\n4. 
**Verify Alert Delivery in Microsoft Sentinel**:\n - Check that alerts from Doppel are successfully forwarded to Microsoft Sentinel.\n - Validate that the **Workbook** in Microsoft Sentinel is updated with the alert statistics, ensuring seamless data integration.""}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": false}}], ""customs"": [{""name"": ""Microsoft Entra Tenant ID, Client ID and Client Secret"", ""description"": ""Microsoft Entra ID requires a Client ID and Client Secret to authenticate your application. 
Additionally, Global Admin/Owner level access is required to assign the Entra-registered application a Resource Group Monitoring Metrics Publisher role.""}, {""name"": ""Requires Workspace ID, DCE-URI, DCR-ID"", ""description"": ""You will need to get the Log Analytics Workspace ID, DCE Logs Ingestion URI and DCR Immutable ID for the configuration.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Doppel/Data%20Connectors/Template_Doppel.json","true" +"DragosAlerts_CL","Dragos","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dragos","dragosinc1734451815609","microsoft-sentinel-solution-dragos","2025-01-23","2025-01-23","","Dragos Inc","Partner","https://www.dragos.com","","domains","DragosSitestoreCCP","Dragos"," Dragos Notifications via Cloud Sitestore","The [Dragos Platform](https://www.dragos.com/) is the leading Industrial Cyber Security platform it offers a comprehensive Operational Technology (OT) cyber threat detection built by unrivaled industrial cybersecurity expertise. 
This solution enables Dragos Platform notification data to be viewed in Microsoft Sentinel so that security analysts are able to triage potential cyber security events occurring in their industrial environments.","[{""description"": ""Please provide the following information to allow Microsoft Sentinel to connect to your Dragos Sitestore."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Dragos Sitestore Hostname"", ""placeholder"": ""dragossitestore.example.com"", ""type"": ""text"", ""name"": ""dragosSitestoreHostname""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Dragos Sitestore API Key ID"", ""placeholder"": ""Enter the API key ID."", ""type"": ""text"", ""name"": ""username""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Dragos Sitestore API Key Secret"", ""placeholder"": ""Enter the API key secret"", ""type"": ""password"", ""name"": ""password""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Minimum Notification Severity. Valid values are 0-5 inclusive. Ensure less than or equal to maximum severity."", ""placeholder"": ""Enter the min severity (recommend 0 for all notifications)"", ""type"": ""number"", ""name"": ""minSeverity""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Maximum Notification Severity. Valid values are 0-5 inclusive. 
Ensure greater than or equal to minimum severity."", ""placeholder"": ""Enter the max severity (recommend 5 for all notifications)"", ""type"": ""number"", ""name"": ""maxSeverity""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect to Sitestore"", ""disconnectLabel"": ""Disconnect from Sitestore"", ""name"": ""connectionToggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Dragos Sitestore API access"", ""description"": ""A Sitestore user account that has the `notification:read` permission. This account also needs to have an API key that can be provided to Sentinel.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dragos/Data%20Connectors/DragosSiteStore_CCP/dragosSitestoreDataConnectorDefinition.json","true" +"DruvaInsyncEvents_CL","DruvaDataSecurityCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/DruvaDataSecurityCloud","druva-azuresentinel-solution","azure-sentinel-solution-druva","2024-12-24","","","Druva Inc","Partner","https://support.druva.com/","","domains","DruvaEventCCPDefinition","Microsoft","Druva Events Connector","Provides capability to ingest the Druva events from Druva APIs","[{""description"": "">Note: Configurations to connect to Druva Rest API\n""}, {""description"": ""Step 1: Create credentials from Druva console. Refer this doc for steps:- https://help.druva.com/en/articles/8580838-create-and-manage-api-credentials\n""}, {""description"": ""Step 2: Enter the hostname. 
For public cloud its apis.druva.com\n""}, {""description"": ""Step 3: Enter client id and client secret key\n""}, {""description"": ""Provide required values:\n"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Hostname"", ""placeholder"": ""Example: apis.druva.com"", ""type"": ""text"", ""name"": ""hostname""}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}], ""title"": ""Connect to Druva API to start collecting logs in Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permission are required"", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Druva API Access"", ""description"": ""Druva API requires a client id and client secret to authenticate""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/DruvaDataSecurityCloud/Data%20Connectors/Druva_ccp/Druva_DataConnectorDefinition.json","true" +"DruvaPlatformEvents_CL","DruvaDataSecurityCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/DruvaDataSecurityCloud","druva-azuresentinel-solution","azure-sentinel-solution-druva","2024-12-24","","","Druva Inc","Partner","https://support.druva.com/","","domains","DruvaEventCCPDefinition","Microsoft","Druva Events Connector","Provides capability to ingest the Druva events from Druva APIs","[{""description"": "">Note: Configurations to connect to Druva Rest API\n""}, {""description"": ""Step 1: Create credentials from Druva console. Refer this doc for steps:- https://help.druva.com/en/articles/8580838-create-and-manage-api-credentials\n""}, {""description"": ""Step 2: Enter the hostname. 
For public cloud its apis.druva.com\n""}, {""description"": ""Step 3: Enter client id and client secret key\n""}, {""description"": ""Provide required values:\n"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Hostname"", ""placeholder"": ""Example: apis.druva.com"", ""type"": ""text"", ""name"": ""hostname""}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}], ""title"": ""Connect to Druva API to start collecting logs in Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permission are required"", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Druva API Access"", ""description"": ""Druva API requires a client id and client secret to authenticate""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/DruvaDataSecurityCloud/Data%20Connectors/Druva_ccp/Druva_DataConnectorDefinition.json","true" +"DruvaSecurityEvents_CL","DruvaDataSecurityCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/DruvaDataSecurityCloud","druva-azuresentinel-solution","azure-sentinel-solution-druva","2024-12-24","","","Druva Inc","Partner","https://support.druva.com/","","domains","DruvaEventCCPDefinition","Microsoft","Druva Events Connector","Provides capability to ingest the Druva events from Druva APIs","[{""description"": "">Note: Configurations to connect to Druva Rest API\n""}, {""description"": ""Step 1: Create credentials from Druva console. Refer this doc for steps:- https://help.druva.com/en/articles/8580838-create-and-manage-api-credentials\n""}, {""description"": ""Step 2: Enter the hostname. 
For public cloud its apis.druva.com\n""}, {""description"": ""Step 3: Enter client id and client secret key\n""}, {""description"": ""Provide required values:\n"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Hostname"", ""placeholder"": ""Example: apis.druva.com"", ""type"": ""text"", ""name"": ""hostname""}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}], ""title"": ""Connect to Druva API to start collecting logs in Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permission are required"", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Druva API Access"", ""description"": ""Druva API requires a client id and client secret to authenticate""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/DruvaDataSecurityCloud/Data%20Connectors/Druva_ccp/Druva_DataConnectorDefinition.json","true" +"Dynamics365Activity","Dynamics 365","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynamics%20365","sentinel4dynamics365","dynamics365connector","2023-01-17","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Dynamics365","Microsoft","Dynamics 365","The Dynamics 365 Common Data Service (CDS) activities connector provides insight into admin, user, and support activities, as well as Microsoft Social Engagement logging events. By connecting Dynamics 365 CRM logs into Microsoft Sentinel, you can view this data in workbooks, use it to create custom alerts, and improve your investigation process. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com//fwlink/p/?linkid=2226719&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""description"": ""Connect [Dynamics 365 CRM](https://aka.ms/Sentinel/Dynamics365) activity logs to your Microsoft Sentinel workspace."", ""instructions"": [{""parameters"": {""connectorKind"": ""Dynamics365"", ""title"": ""Dynamics365"", ""enable"": true}, ""type"": ""SentinelResourceProvider""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Tenant Permissions"", ""description"": ""'Security Administrator' or 'Global Administrator' on the workspace's tenant.""}, {""name"": ""License"", ""description"": ""[Microsoft Dynamics 365 production license](https://docs.microsoft.com/office365/servicedescriptions/microsoft-dynamics-365-online-service-description) (This connector is available for production environments only, not for sandbox). 
Also, a Microsoft 365 Enterprise [E3 or E5](https://docs.microsoft.com/power-platform/admin/enable-use-comprehensive-auditing#requirements) subscription is required for Activity Logging.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynamics%20365/Data%20Connectors/template_Dynamics365.json","true" +"DynatraceAttacks_CL","Dynatrace","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynatrace","dynatrace","dynatrace_azure_sentinel","2022-10-18","2023-10-16","","Dynatrace","Partner","https://www.dynatrace.com/services-support/","","domains","DynatraceAttacks","Dynatrace","Dynatrace Attacks","This connector uses the Dynatrace Attacks REST API to ingest detected attacks into Microsoft Sentinel Log Analytics","[{""title"": ""Dynatrace Attack Events to Microsoft Sentinel"", ""description"": ""Configure and Enable Dynatrace [Application Security](https://www.dynatrace.com/platform/application-security/). \n Follow [these instructions](https://docs.dynatrace.com/docs/shortlink/token#create-api-token) to generate an access token."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Dynatrace tenant (ex. xyz.dynatrace.com)"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{dynatraceEnvironmentUrl}}""}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Dynatrace tenant (ex. 
xyz.dynatrace.com)"", ""description"": ""You need a valid Dynatrace tenant with [Application Security](https://www.dynatrace.com/platform/application-security/) enabled, learn more about the [Dynatrace platform](https://www.dynatrace.com/).""}, {""name"": ""Dynatrace Access Token"", ""description"": ""You need a Dynatrace Access Token, the token should have ***Read attacks*** (attacks.read) scope.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynatrace/Data%20Connectors/Connector_Dynatrace_Attacks.json","true" +"DynatraceAuditLogs_CL","Dynatrace","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynatrace","dynatrace","dynatrace_azure_sentinel","2022-10-18","2023-10-16","","Dynatrace","Partner","https://www.dynatrace.com/services-support/","","domains","DynatraceAuditLogs","Dynatrace","Dynatrace Audit Logs","This connector uses the [Dynatrace Audit Logs REST API](https://docs.dynatrace.com/docs/dynatrace-api/environment-api/audit-logs) to ingest tenant audit logs into Microsoft Sentinel Log Analytics","[{""title"": ""Dynatrace Audit Log Events to Microsoft Sentinel"", ""description"": ""Enable Dynatrace Audit [Logging](https://docs.dynatrace.com/docs/shortlink/audit-logs#enable-audit-logging). \n Follow [these instructions](https://docs.dynatrace.com/docs/shortlink/token#create-api-token) to generate an access token."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Dynatrace tenant (ex. 
xyz.dynatrace.com)"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{dynatraceEnvironmentUrl}}""}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Dynatrace tenant (ex. xyz.dynatrace.com)"", ""description"": ""You need a valid Dynatrace Tenant, to learn more about the Dynatrace platform [Start your free trial](https://www.dynatrace.com/trial).""}, {""name"": ""Dynatrace Access Token"", ""description"": ""You need a Dynatrace Access Token, the token should have ***Read audit logs*** (auditLogs.read) scope.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynatrace/Data%20Connectors/Connector_Dynatrace_AuditLogs.json","true" +"DynatraceProblems_CL","Dynatrace","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynatrace","dynatrace","dynatrace_azure_sentinel","2022-10-18","2023-10-16","","Dynatrace","Partner","https://www.dynatrace.com/services-support/","","domains","DynatraceProblems","Dynatrace","Dynatrace Problems","This connector uses the [Dynatrace Problem REST API](https://docs.dynatrace.com/docs/dynatrace-api/environment-api/problems-v2) to ingest problem events into Microsoft Sentinel Log Analytics","[{""title"": ""Dynatrace Problem Events to Microsoft Sentinel"", ""description"": ""Follow [these instructions](https://docs.dynatrace.com/docs/shortlink/token#create-api-token) to generate an access token."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Dynatrace tenant (ex. 
xyz.dynatrace.com)"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{dynatraceEnvironmentUrl}}""}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Dynatrace tenant (ex. xyz.dynatrace.com)"", ""description"": ""You need a valid Dynatrace Tenant, to learn more about the Dynatrace platform [Start your free trial](https://www.dynatrace.com/trial).""}, {""name"": ""Dynatrace Access Token"", ""description"": ""You need a Dynatrace Access Token, the token should have ***Read problems*** (problems.read) scope.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynatrace/Data%20Connectors/Connector_Dynatrace_Problems.json","true" +"DynatraceSecurityProblems_CL","Dynatrace","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynatrace","dynatrace","dynatrace_azure_sentinel","2022-10-18","2023-10-16","","Dynatrace","Partner","https://www.dynatrace.com/services-support/","","domains","DynatraceRuntimeVulnerabilities","Dynatrace","Dynatrace Runtime Vulnerabilities","This connector uses the [Dynatrace Security Problem REST API](https://docs.dynatrace.com/docs/dynatrace-api/environment-api/application-security/vulnerabilities/get-vulnerabilities) to ingest detected runtime vulnerabilities into Microsoft Sentinel Log Analytics.","[{""title"": ""Dynatrace Vulnerabilities Events to Microsoft Sentinel"", ""description"": ""Configure and Enable Dynatrace [Application Security](https://www.dynatrace.com/platform/application-security/). 
\n Follow [these instructions](https://docs.dynatrace.com/docs/shortlink/token#create-api-token) to generate an access token."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Dynatrace tenant (ex. xyz.dynatrace.com)"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{dynatraceEnvironmentUrl}}""}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Dynatrace tenant (ex. xyz.dynatrace.com)"", ""description"": ""You need a valid Dynatrace tenant with [Application Security](https://www.dynatrace.com/platform/application-security/) enabled, learn more about the [Dynatrace platform](https://www.dynatrace.com/).""}, {""name"": ""Dynatrace Access Token"", ""description"": ""You need a Dynatrace Access Token, the token should have ***Read security problems*** (securityProblems.read) scope.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Dynatrace/Data%20Connectors/Connector_Dynatrace_RuntimeVulnerabilities.json","true" +"ESETInspect_CL","ESET Inspect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESET%20Inspect","esetresearch1579795941720","eset_enterprise_inspector_mss","2022-06-01","","","ESET Enterprise","Partner","https://www.eset.com/int/business/solutions/endpoint-detection-and-response/","","domains","ESETInspect","ESET Netherlands","ESET Inspect","This connector will ingest detections from [ESET Inspect](https://www.eset.com/int/business/solutions/xdr-extended-detection-and-response/) using the provided [REST API](https://help.eset.com/ei_navigate/latest/en-US/api.html). 
This API is present in ESET Inspect version 1.4 and later.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to ESET Inspect to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Step 1 - Create an API user"", ""description"": ""1. Log into the ESET PROTECT console with an administrator account, select the **More** tab and the **Users** subtab. \n2. Click on the **ADD NEW** button and add a **native user**.\n3. Create a new user for the API account. **Optional:** Select a **Home group** other than **All** to limit what detections are ingested. \n4. Under the **Permission Sets** tab, assign the **Inspect reviewer permission set**.\n4. Log out of the administrator account and log into the console with the new API credentials for validation, then log out of the API account. 
\n5.""}, {""title"": ""Step 2 - Copy Workspace ID and Key"", ""description"": "">**IMPORTANT:** Before deploying the ESET Inspect connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Step 3 - Deploy the Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the ESET Inspect connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESETInspect-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password** , enter the **Inspect base URL** and the **first ID** to start ingesting detections from.\n - The default starting ID is **0**. This means that all detections will be ingested. \n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labelled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Access to the ESET PROTECT console"", ""description"": ""Permissions to add users""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESET%20Inspect/Data%20Connectors/ESETInspect_API_FunctionApp.json","true" +"IntegrationTableIncidents_CL","ESET Protect Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESET%20Protect%20Platform","eset","eset-protect-platform-solution","2024-10-29","2025-06-17","","ESET Enterprise Integrations","Partner","https://help.eset.com/eset_connect/en-US/integrations.html","","domains","ESETProtectPlatform","ESET","ESET Protect Platform","The ESET Protect Platform data connector enables users to ingest detections data from [ESET Protect Platform](https://www.eset.com/int/business/protect-platform/) using the provided [Integration REST 
API](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESET%20Protect%20Platform/Data%20Connectors). Integration REST API runs as scheduled Azure Function App.","[{""description"": "">**NOTE:** The ESET Protect Platform data connector uses Azure Functions to connect to the ESET Protect Platform via Eset Connect API to pull detections logs into Microsoft Sentinel. This process might result in additional data ingestion costs. See details on the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/).""}, {""description"": "">**NOTE:** The newest version of the ESET PROTECT Platform and Microsoft Sentinel integration pulls not only detections logs but also newly created incidents. If your integration was set up before 20.06.2025, please follow [these steps](https://help.eset.com/eset_connect/en-US/update_ms_sentinel_integration.html) to update it.""}, {""title"": ""Step 1 - Create an API user"", ""description"": ""Use this [instruction](https://help.eset.com/eset_connect/en-US/create_api_user_account.html) to create an ESET Connect API User account with **Login** and **Password**.""}, {""title"": ""Step 2 - Create a registered application"", ""description"": ""Create a Microsoft Entra ID registered application by following the steps in the [Register a new application instruction.](https://learn.microsoft.com/en-us/entra/identity-platform/quickstart-register-app)""}, {""title"": ""Step 3 - Deploy the ESET Protect Platform data connector using the Azure Resource Manager (ARM) template"", ""description"": ""\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-EsetProtectionPlatform-azuredeploy)\n\n2. Select the name of the **Log Analytics workspace** associated with your Microsoft Sentinel. Select the same **Resource Group** as the Resource Group of the Log Analytics workspace.\n\n3. 
Type the parameters of the registered application in Microsoft Entra ID: **Azure Client ID**, **Azure Client Secret**, **Azure Tenant ID**, **Object ID**. You can find the **Object ID** on Azure Portal by following this path \n> Microsoft Entra ID -> Manage (on the left-side menu) -> Enterprise applications -> Object ID column (the value next to your registered application name).\n\n4. Provide the ESET Connect API user account **Login** and **Password** obtained in **Step 1**.\n\n5. Select one or more ESET products (ESET PROTECT, ESET Inspect, ESET Cloud Office Security) from which detections are retrieved.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to register an application in Microsoft Entra ID"", ""description"": ""Sufficient permissions to register an application with your Microsoft Entra tenant are required.""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign the Monitoring Metrics Publisher role to the registered application in Microsoft Entra ID is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESET%20Protect%20Platform/Data%20Connectors/ESETProtectPlatform_API_FunctionApp.json","true" +"IntegrationTable_CL","ESET Protect Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESET%20Protect%20Platform","eset","eset-protect-platform-solution","2024-10-29","2025-06-17","","ESET Enterprise Integrations","Partner","https://help.eset.com/eset_connect/en-US/integrations.html","","domains","ESETProtectPlatform","ESET","ESET Protect Platform","The ESET Protect Platform data connector enables users to ingest detections data from [ESET Protect Platform](https://www.eset.com/int/business/protect-platform/) using the provided [Integration REST API](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESET%20Protect%20Platform/Data%20Connectors). Integration REST API runs as scheduled Azure Function App.","[{""description"": "">**NOTE:** The ESET Protect Platform data connector uses Azure Functions to connect to the ESET Protect Platform via Eset Connect API to pull detections logs into Microsoft Sentinel. This process might result in additional data ingestion costs. See details on the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/).""}, {""description"": "">**NOTE:** The newest version of the ESET PROTECT Platform and Microsoft Sentinel integration pulls not only detections logs but also newly created incidents. 
If your integration was set up before 20.06.2025, please follow [these steps](https://help.eset.com/eset_connect/en-US/update_ms_sentinel_integration.html) to update it.""}, {""title"": ""Step 1 - Create an API user"", ""description"": ""Use this [instruction](https://help.eset.com/eset_connect/en-US/create_api_user_account.html) to create an ESET Connect API User account with **Login** and **Password**.""}, {""title"": ""Step 2 - Create a registered application"", ""description"": ""Create a Microsoft Entra ID registered application by following the steps in the [Register a new application instruction.](https://learn.microsoft.com/en-us/entra/identity-platform/quickstart-register-app)""}, {""title"": ""Step 3 - Deploy the ESET Protect Platform data connector using the Azure Resource Manager (ARM) template"", ""description"": ""\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-EsetProtectionPlatform-azuredeploy)\n\n2. Select the name of the **Log Analytics workspace** associated with your Microsoft Sentinel. Select the same **Resource Group** as the Resource Group of the Log Analytics workspace.\n\n3. Type the parameters of the registered application in Microsoft Entra ID: **Azure Client ID**, **Azure Client Secret**, **Azure Tenant ID**, **Object ID**. You can find the **Object ID** on Azure Portal by following this path \n> Microsoft Entra ID -> Manage (on the left-side menu) -> Enterprise applications -> Object ID column (the value next to your registered application name).\n\n4. Provide the ESET Connect API user account **Login** and **Password** obtained in **Step 1**.\n\n5. 
Select one or more ESET products (ESET PROTECT, ESET Inspect, ESET Cloud Office Security) from which detections are retrieved.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to register an application in Microsoft Entra ID"", ""description"": ""Sufficient permissions to register an application with your Microsoft Entra tenant are required.""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign the Monitoring Metrics Publisher role to the registered application in Microsoft Entra ID is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESET%20Protect%20Platform/Data%20Connectors/ESETProtectPlatform_API_FunctionApp.json","true" +"Syslog","ESETPROTECT","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESETPROTECT","cyberdefensegroupbv1625581149103","eset_protect","2021-10-20","","","ESET Netherlands","Partner","https://techcenter.eset.nl/en/","","domains","ESETPROTECT","ESET","[Deprecated] ESET PROTECT","This connector gathers all events generated by ESET software through the central management solution ESET PROTECT (formerly ESET Security Management Center). This includes Anti-Virus detections, Firewall detections but also more advanced EDR detections. For a complete list of events please refer to [the documentation](https://help.eset.com/protect_admin/latest/en-US/events-exported-to-json-format.html).","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias ESETPROTECT and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESETPROTECT/Parsers/ESETPROTECT.txt).The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n2. Select **Apply below configuration to my machines** and select the facilities and severities. The default ESET PROTECT facility is **user**.\n3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Configure ESET PROTECT"", ""description"": ""Configure ESET PROTECT to send all events through Syslog.\n\n1. Follow [these instructions](https://help.eset.com/protect_admin/latest/en-US/admin_server_settings_syslog.html) to configure syslog output. Make sure to select **BSD** as the format and **TCP** as the transport.\n\n2. Follow [these instructions](https://help.eset.com/protect_admin/latest/en-US/admin_server_settings_export_to_syslog.html) to export all logs to syslog. 
Select **JSON** as the output format.\n\nNote:- Refer to the [documentation](https://learn.microsoft.com/en-us/azure/sentinel/connect-log-forwarder?tabs=rsyslog#security-considerations) for setting up the log forwarder for both local and cloud storage."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ESETPROTECT/Data%20Connectors/Connector_Syslog_ESETPROTECT.json","true" +"","EatonForeseer","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/EatonForeseer","azuresentinel","azure-sentinel-solution-eatonforeseer","2022-06-28","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","EclecticIQ","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/EclecticIQ","azuresentinel","azure-sentinel-solution-eclecticiqtip","2022-09-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"EgressDefend_CL","Egress Defend","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Egress%20Defend","egress1589289169584","azure-sentinel-solution-egress-defend","2023-07-27","","","egress1589289169584","Partner","https://support.egress.com/s/","","domains","EgressDefendPolling","Egress Software Technologies","Egress Defend","The Egress Defend audit connector provides the capability to ingest Egress Defend Data into Microsoft Sentinel.","[{""title"": ""Connect Egress Defend with Microsoft Sentinel"", ""description"": ""Enter your Egress Defend API URL, Egress Domain and API token."", ""instructions"": [{""parameters"": {""enable"": 
""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""API URL"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{apiUrl}}""}, {""displayText"": ""Domain name"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{domain}}""}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions on the Log Analytics workspace are required to enable the data connector."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true, ""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Egress API Token"", ""description"": ""An Egress API token is required to ingest audit records to Microsoft Sentinel.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Egress%20Defend/Data%20Connectors/DefendAPIConnector.json","true" +"DefendAuditData","Egress Iris","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Egress%20Iris","egress1589289169584","egress-sentinel","2024-03-11","","","Egress Software Technologies Ltd","Partner","https://support.egress.com","","domains","EgressSiemPolling","Egress Software Technologies","Egress Iris Connector","The Egress Iris connector will allow you to ingest Egress data into Sentinel.","[{""title"": ""Connect Egress Data with Microsoft Sentinel"", ""description"": ""Enter your Egress API Hostname and secret."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Hostname"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{hostname}}""}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions on the Log Analytics workspace are required to enable the data connector."", ""providerDisplayName"": ""Workspace"", 
""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true, ""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Egress API Token"", ""description"": ""An Egress API token is required to ingest audit records to Microsoft Sentinel.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Egress%20Iris/Data%20Connectors/EgressDataConnector.json","true" +"EgressEvents_CL","Egress Iris","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Egress%20Iris","egress1589289169584","egress-sentinel","2024-03-11","","","Egress Software Technologies Ltd","Partner","https://support.egress.com","","domains","EgressSiemPolling","Egress Software Technologies","Egress Iris Connector","The Egress Iris connector will allow you to ingest Egress data into Sentinel.","[{""title"": ""Connect Egress Data with Microsoft Sentinel"", ""description"": ""Enter your Egress API Hostname and secret."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Hostname"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{hostname}}""}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions on the Log Analytics workspace are required to enable the data connector."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true, ""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Egress API Token"", ""description"": ""An Egress API token is required to ingest audit records to Microsoft Sentinel.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Egress%20Iris/Data%20Connectors/EgressDataConnector.json","true" +"","Elastic 
Search","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Elastic%20Search","azuresentinel","azure-sentinel-solution-elasticsearch","2022-09-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"ElasticAgentLogs_CL","ElasticAgent","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ElasticAgent","azuresentinel","azure-sentinel-solution-elasticagent","2021-11-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ElasticAgent","Elastic","Elastic Agent","The [Elastic Agent](https://www.elastic.co/security) data connector provides the capability to ingest Elastic Agent logs, metrics, and security data into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**ElasticAgentEvent**](https://aka.ms/sentinel-ElasticAgent-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using **Elastic Agent 7.14**."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server where the Elastic Agent logs are forwarded.\n\n> Logs from Elastic Agents deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure Elastic Agent (Standalone)"", ""description"": ""[Follow the instructions](https://www.elastic.co/guide/en/fleet/current/elastic-agent-configuration.html) to configure Elastic Agent to output to Logstash""}, {""title"": ""3. 
Configure Logstash to use Microsoft Logstash Output Plugin"", ""description"": ""Follow the steps to configure Logstash to use microsoft-logstash-output-azure-loganalytics plugin:\n\n3.1) Check if the plugin is already installed:\n> ./logstash-plugin list | grep 'azure-loganalytics'\n**(if the plugin is installed go to step 3.3)**\n\n3.2) Install plugin:\n> ./logstash-plugin install microsoft-logstash-output-azure-loganalytics\n\n3.3) [Configure Logstash](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/microsoft-logstash-output-azure-loganalytics) to use the plugin""}, {""title"": ""4. Validate log ingestion"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using custom table specified in step 3.3 (e.g. ElasticAgentLogs_CL).\n\n>It may take about 30 minutes until the connection streams data to your workspace.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Include custom pre-requisites if the connectivity requires - else delete customs"", ""description"": ""Description for any custom pre-requisite""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ElasticAgent/Data%20Connectors/Connector_ElasticAgent.json","true" +"","Endace","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Endace","azuresentinel","azure-sentinel-solution-endace","2025-03-24","","","Endace","Partner","https://endace.com","","domains","","","","","","","","false" +"","Endpoint Threat Protection Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Endpoint%20Threat%20Protection%20Essentials","azuresentinel","azure-sentinel-solution-endpointthreat","2022-11-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","Entrust identity as Service","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Entrust%20identity%20as%20Service","azuresentinel","azure-sentinel-solution-entrustidentity","2023-05-22","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"ErmesBrowserSecurityEvents_CL","Ermes Browser Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Ermes%20Browser%20Security","ermes","azure-sentinel-solution-ermes-browser-security","2023-09-29","","","Ermes Cyber Security S.p.A.","Partner","https://www.ermes.company","","domains","ErmesBrowserSecurityEvents","Ermes Cyber Security S.p.A.","Ermes Browser Security Events","Ermes Browser Security Events","[{""description"": ""Connect using OAuth2 credentials"", ""instructions"": [{""type"": ""OAuthForm"", 
""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}], ""title"": ""Connect Ermes Browser Security Events to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Ermes Client Id and Client Secret"", ""description"": ""Enable API access in Ermes. Please contact [Ermes Cyber Security](https://www.ermes.company) support for more information.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Ermes%20Browser%20Security/Data%20Connectors/ErmesBrowserSecurityEvents_ccp/data_connector_definition.json","true" +"eset_CL","Eset Security Management Center","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Eset%20Security%20Management%20Center","esetresearch1579795941720","Eset_Security_Management_Center_MSS","2022-05-11","","","Eset","partner","https://support.eset.com/en","","domains","EsetSMC","Eset","Eset Security Management Center","Connector for [Eset SMC](https://help.eset.com/esmc_admin/72/en-US/) threat events, audit logs, firewall events and web sites filter.","[{""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure rsyslog to accept logs from your Eset SMC IP address.\n\n```\nsudo -i\r\n\r\n# Set ESET SMC source IP address\r\nexport ESETIP={Enter your IP address}\r\n\r\n# Create rsyslog configuration file\r\ncat > /etc/rsyslog.d/80-remote.conf << EOF\r\n\\$ModLoad imudp\r\n\\$UDPServerRun 514\r\n\\$ModLoad imtcp\r\n\\$InputTCPServerRun 514\r\n\\$AllowedSender TCP, 127.0.0.1, $ESETIP\r\n\\$AllowedSender UDP, 127.0.0.1, $ESETIP\r\nuser.=alert;user.=crit;user.=debug;user.=emerg;user.=err;user.=info;user.=notice;user.=warning @127.0.0.1:25224\r\nEOF\r\n\r\n# Restart rsyslog\r\nsystemctl restart rsyslog```""}, {""title"": ""3. Configure OMS agent to pass Eset SMC data in API format"", ""description"": ""In order to easily recognize Eset data we will push it to separate table and parse at agent so query in Azure Sentinel is easier and fast. To make it simple we will just modify ```match oms.**``` section to send data as API objects by changing type to out_oms_api. 
Modify file on /etc/opt/microsoft/omsagent/{REPLACEyourworkspaceid}/conf/omsagent.conf. Full ```match oms.**``` section looks like this:\r\n\r\n```\r\n\r\n type out_oms_api\r\n log_level info\r\n num_threads 5\r\n run_in_background false\r\n\r\n omsadmin_conf_path /etc/opt/microsoft/omsagent/{REPLACEyourworkspaceid}/conf/omsadmin.conf\r\n cert_path /etc/opt/microsoft/omsagent/{REPLACEyourworkspaceid}/certs/oms.crt\r\n key_path /etc/opt/microsoft/omsagent/{REPLACEyourworkspaceid}/certs/oms.key\r\n\r\n buffer_chunk_limit 15m\r\n buffer_type file\r\n buffer_path /var/opt/microsoft/omsagent/{REPLACEyourworkspaceid}/state/out_oms_common*.buffer\r\n\r\n buffer_queue_limit 10\r\n buffer_queue_full_action drop_oldest_chunk\r\n flush_interval 20s\r\n retry_limit 10\r\n retry_wait 30s\r\n max_retry_wait 9m\r\n\r\n```\r\n""}, {""title"": ""4. Change OMS agent configuration to catch tag oms.api.eset and parse structured data"", ""description"": ""Modify file /etc/opt/microsoft/omsagent/{REPLACEyourworkspaceid}/conf/omsagent.d/syslog.conf\n```\r\n\r\n type syslog\r\n port 25224\r\n bind 127.0.0.1\r\n protocol_type udp\r\n tag oms.api.eset\r\n\r\n\r\n\r\n @type parser\r\n key_name message\r\n format /(?.*?{.*})/\r\n\r\n\r\n\r\n @type parser\r\n key_name message\r\n format json\r\n\r\n```""}, {""title"": ""5. Disable automatic configuration and restart agent"", ""description"": ""```bash\r\n# Disable changes to configuration files from Portal\r\nsudo su omsagent -c 'python /opt/microsoft/omsconfig/Scripts/OMS_MetaConfigHelper.py --disable'\r\n\r\n# Restart agent\r\nsudo /opt/microsoft/omsagent/bin/service_control restart\r\n\r\n# Check agent logs\r\ntail -f /var/opt/microsoft/omsagent/log/omsagent.log\r\n```""}, {""title"": ""6. 
Configure Eset SMC to send logs to connector"", ""description"": ""Configure Eset Logs using BSD style and JSON format.\r\n- Go to Syslog server configuration as described in [Eset documentation](https://help.eset.com/esmc_admin/72/en-US/admin_server_settings.html?admin_server_settings_syslog.html) and configure Host (your connector), Format BSD, Transport TCP\r\n- Go to Logging section as described in [Eset documentation](https://help.eset.com/esmc_admin/72/en-US/admin_server_settings.html?admin_server_settings_export_to_syslog.html) and enable JSON""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Access to Eset SMC console"", ""description"": ""Permissions to configure log export""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Eset%20Security%20Management%20Center/Data%20Connectors/esetSmc.json","true" +"Syslog","Exabeam Advanced Analytics","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Exabeam%20Advanced%20Analytics","azuresentinel","azure-sentinel-solution-exabeamadvancedanalytics","2022-05-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Exabeam","Exabeam","[Deprecated] Exabeam Advanced Analytics","The [Exabeam Advanced Analytics](https://www.exabeam.com/ueba/advanced-analytics-and-mitre-detect-and-stop-threats/) data connector provides the capability to ingest Exabeam Advanced Analytics events into Microsoft Sentinel. Refer to [Exabeam Advanced Analytics documentation](https://docs.exabeam.com/) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Exabeam Advanced Analytics and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Exabeam%20Advanced%20Analytics/Parsers/ExabeamEvent.txt), on the second line of the query, enter the hostname(s) of your Exabeam Advanced Analytics device(s) and any other unique identifiers for the logstream. 
The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using Exabeam Advanced Analytics i54 (Syslog)"", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the server where the Exabeam Advanced Analytic logs are generated or forwarded.\n\n> Logs from Exabeam Advanced Analytic deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": 
""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the custom log directory to be collected"", ""instructions"": [{""parameters"": {""linkType"": ""OpenCustomLogsSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Configure Exabeam event forwarding to Syslog"", ""description"": ""[Follow these instructions](https://docs.exabeam.com/en/advanced-analytics/i56/advanced-analytics-administration-guide/125351-advanced-analytics.html#UUID-7ce5ff9d-56aa-93f0-65de-c5255b682a08) to send Exabeam Advanced Analytics activity log data via syslog.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Exabeam%20Advanced%20Analytics/Data%20Connectors/Connector_Exabeam_Syslog.json","true" +"ExtraHop_Detections_CL","ExtraHop","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ExtraHop","extrahop","extrahop-revealx-sentinel","2025-02-11","2025-06-04","","ExtraHop Support","Partner","https://www.extrahop.com/customer-support","","domains","ExtraHop","ExtraHop","ExtraHop Detections Data Connector","The [ExtraHop](https://extrahop.com/) Detections Data Connector enables you to import detection data from ExtraHop RevealX to Microsoft Sentinel through webhook payloads.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ExtraHop in which logs are pushed via ExtraHop webhook and it will ingest logs into Microsoft Sentinel. Furthermore, the connector will fetch the ingested data from the custom logs table and create Threat Intelligence Indicators into Microsoft Sentinel Threat Intelligence. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias **ExtraHopDetections** and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ExtraHop/Parsers/ExtraHopDetections.yaml). The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the ExtraHop Microsoft Sentinel data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Complete the following steps for automated deployment of the ExtraHop Detections Data Connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ExtraHop-azuredeploy)\n2. 
Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the values for the following parameters:\n\n\t a. **Function Name** - Enter the Function Name you want. \n\n\t b. **Workspace ID** - Enter the Workspace ID of the log analytics Workspace. \n\n\t c. **Workspace Key** - Enter the Workspace Key of the log analytics Workspace. \n\n\t d. **Detections Table Name** - Enter the name of the table used to store ExtraHop detection data. \n\n\t e. **LogLevel** - Select Debug, Info, Error, or Warning for the log level or log severity value. \n\n\t f. **AppInsightsWorkspaceResourceID** - Enter the value of the 'Log Analytic Workspace-->Properties-->Resource ID' property. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Complete the following steps to manually deploy the ExtraHop Detections Data Connector with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": ""1) Deploy a Function App"", ""description"": ""> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-ExtraHop-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. 
**Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ExtraHopXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": ""2) Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with the following respective values (case-sensitive): \n\n\t a. **Function Name** - Enter the Function Name you want. \n\n\t b. **Workspace ID** - Enter the Workspace ID of the log analytics Workspace. \n\n\tc. **Workspace Key** - Enter the Workspace Key of the log analytics Workspace. \n\n\td. **Detections Table Name** - Enter the name of the table used to store ExtraHop detection data. \n\n\te. **LogLevel** - Select Debug, Info, Error, or Warning for the log level or log severity value. \n\n\t f. **logAnalyticsUri (optional)** - Configure this option to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. 
Once all application settings have been entered, click **Save**.""}, {""title"": """", ""description"": ""**STEP 2 - Post Deployment**\n\n""}, {""title"": ""1) Get the Function App endpoint"", ""description"": ""1. Go to the Azure function overview page and click the **\""Functions\""** tab.\n2. Click on the function called **\""ExtraHopHttpStarter\""**.\n3. Go to **\""GetFunctionurl\""** and copy the function url available under **\""default (Function key)\""**.\n4. Replace **{functionname}** with **\""ExtraHopDetectionsOrchestrator\""** in copied function url.""}, {""title"": ""2) Configure a connection to Microsoft Sentinel and specify webhook payload criteria from RevealX"", ""description"": ""From your ExtraHop system, configure the Microsoft Sentinel integration to establish a connection between Microsoft Sentinel and ExtraHop RevealX and to create detection notification rules that will send webhook data to Microsoft Sentinel. For detailed instructions, refer to [Integrate ExtraHop RevealX with Microsoft Sentinel SIEM](https://docs.extrahop.com/current/integrations-microsoft-sentinel-siem/).""}, {""title"": """", ""description"": ""*After notification rules have been configured and Microsoft Sentinel is receiving webhook data, the Function App is triggered and you can view ExtraHop detections from the Log Analytics workspace table named \""ExtraHop_Detections_CL\"".*\n\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ExtraHop RevealX permissions"", ""description"": ""The following is required on your ExtraHop RevealX system:\n 1.Your RevealX system must be running firmware version 9.9.2 or later.\n 2.Your RevealX system must be connected to ExtraHop Cloud Services.\n 3.Your user account must have System Administration privileges on RevealX 360 or Full Write privileges on RevealX Enterprise.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ExtraHop/Data%20Connectors/ExtraHopDataConnector/ExtraHop_FunctionApp.json","true"
+"CommonSecurityLog","ExtraHop Reveal(x)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ExtraHop%20Reveal%28x%29","extrahop","extrahop_revealx_mss","2022-05-19","","","ExtraHop","Partner","https://www.extrahop.com/support/","","domains","ExtraHopNetworks","ExtraHop Networks","[Deprecated] ExtraHop Reveal(x) via Legacy Agent","The ExtraHop Reveal(x) data connector enables you to easily connect your Reveal(x) system with Microsoft Sentinel to view dashboards, create custom alerts, and improve investigation. This integration gives you the ability to gain insight into your organization's network and improve your security operation capabilities.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python --version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward ExtraHop Networks logs to Syslog agent"", ""description"": ""1. Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure to send the logs to port 514 TCP on the machine IP address.\n2. Follow the directions to install the [ExtraHop Detection SIEM Connector bundle](https://aka.ms/asi-syslog-extrahop-forwarding) on your Reveal(x) system. The SIEM Connector is required for this integration.\n3. Enable the trigger for **ExtraHop Detection SIEM Connector - CEF**\n4. 
Update the trigger with the ODS syslog targets you created\u00a0\n5. The Reveal(x) system formats syslog messages in Common Event Format (CEF) and then sends data to Microsoft Sentinel.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python --version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""ExtraHop"", ""description"": ""ExtraHop Discover or Command appliance with firmware version 7.8 or later with a user account that has Unlimited (administrator) privileges.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ExtraHop%20Reveal%28x%29/Data%20Connectors/template_ExtraHopNetworks.json","true" +"CommonSecurityLog","ExtraHop Reveal(x)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ExtraHop%20Reveal%28x%29","extrahop","extrahop_revealx_mss","2022-05-19","","","ExtraHop","Partner","https://www.extrahop.com/support/","","domains","ExtraHopNetworksAma","ExtraHop Networks","[Deprecated] ExtraHop Reveal(x) via AMA","The ExtraHop Reveal(x) data connector enables you to easily connect your Reveal(x) system with Microsoft Sentinel to view dashboards, create custom alerts, and improve investigation. This integration gives you the ability to gain insight into your organization's network and improve your security operation capabilities.","[{""title"": """", ""description"": """", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine""}, {""title"": ""Step B. Forward ExtraHop Networks logs to Syslog agent"", ""description"": ""1. Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure to send the logs to port 514 TCP on the machine IP address.\n2. Follow the directions to install the [ExtraHop Detection SIEM Connector bundle](https://aka.ms/asi-syslog-extrahop-forwarding) on your Reveal(x) system. The SIEM Connector is required for this integration.\n3. Enable the trigger for **ExtraHop Detection SIEM Connector - CEF**\n4. Update the trigger with the ODS syslog targets you created\u00a0\n5. The Reveal(x) system formats syslog messages in Common Event Format (CEF) and then sends data to Microsoft Sentinel.""}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ExtraHop%20Reveal%28x%29/Data%20Connectors/template_ExtraHopReveal%28x%29AMA.json","true" +"F5Telemetry_ASM_CL","F5 BIG-IP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20BIG-IP","f5-networks","f5_bigip_mss","2022-05-25","","","F5 Networks","Partner","https://support.f5.com/csp/home","","domains","F5BigIp","F5 Networks","F5 BIG-IP","The F5 firewall connector allows you to easily connect your F5 logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": ""Configure and connect F5 BIGIP"", ""description"": ""To connect your F5 BIGIP, you have to post a JSON declaration to the system\u2019s API endpoint. 
For instructions on how to do this, see [Integrating the F5 BIG-IP with Microsoft Sentinel](https://aka.ms/F5BigIp-Integrate)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20BIG-IP/Data%20Connectors/F5BigIp.json","true"
+"F5Telemetry_LTM_CL","F5 BIG-IP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20BIG-IP","f5-networks","f5_bigip_mss","2022-05-25","","","F5 Networks","Partner","https://support.f5.com/csp/home","","domains","F5BigIp","F5 Networks","F5 BIG-IP","The F5 firewall connector allows you to easily connect your F5 logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": ""Configure and connect F5 BIGIP"", ""description"": ""To connect your F5 BIGIP, you have to post a JSON declaration to the system\u2019s API endpoint. 
For instructions on how to do this, see [Integrating the F5 BIG-IP with Microsoft Sentinel](https://aka.ms/F5BigIp-Integrate)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20BIG-IP/Data%20Connectors/F5BigIp.json","true"
+"F5Telemetry_system_CL","F5 BIG-IP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20BIG-IP","f5-networks","f5_bigip_mss","2022-05-25","","","F5 Networks","Partner","https://support.f5.com/csp/home","","domains","F5BigIp","F5 Networks","F5 BIG-IP","The F5 firewall connector allows you to easily connect your F5 logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": ""Configure and connect F5 BIGIP"", ""description"": ""To connect your F5 BIGIP, you have to post a JSON declaration to the system\u2019s API endpoint. 
For instructions on how to do this, see [Integrating the F5 BIG-IP with Microsoft Sentinel](https://aka.ms/F5BigIp-Integrate)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20BIG-IP/Data%20Connectors/F5BigIp.json","true"
+"CommonSecurityLog","F5 Networks","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20Networks","f5-networks","f5_networks_data_mss","2022-05-12","","","F5","Partner","https://www.f5.com/services/support","","domains","F5","F5 Networks","[Deprecated] F5 Networks via Legacy Agent","The F5 firewall connector allows you to easily connect your F5 logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python --version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Configure F5 to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent.\n\nGo to [F5 Configuring Application Security Event Logging](https://aka.ms/asi-syslog-f5-forwarding), follow the instructions to set up remote logging, using the following guidelines:\n\n1. Set the **Remote storage type** to CEF.\n2. Set the **Protocol setting** to UDP.\n3. Set the **IP address** to the Syslog server IP address.\n4. 
Set the **port number** to 514, or the port your agent uses.\n5. Set the **facility** to the one that you configured in the Syslog agent (by default, the agent sets this to local4).\n6. You can set the **Maximum Query String Size** to be the same as you configured.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python --version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20Networks/Data%20Connectors/template_F5.json","true" +"CommonSecurityLog","F5 Networks","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20Networks","f5-networks","f5_networks_data_mss","2022-05-12","","","F5","Partner","https://www.f5.com/services/support","","domains","F5Ama","F5 Networks","[Deprecated] F5 Networks via AMA","The F5 firewall connector allows you to easily connect your F5 logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": """", ""description"": """", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. 
Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine""}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Configure F5 to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent.\n\nGo to [F5 Configuring Application Security Event Logging](https://aka.ms/asi-syslog-f5-forwarding), follow the instructions to set up remote logging, using the following guidelines:\n\n1. Set the **Remote storage type** to CEF.\n2. Set the **Protocol setting** to UDP.\n3. Set the **IP address** to the Syslog server IP address.\n4. Set the **port number** to 514, or the port your agent uses.\n5. Set the **facility** to the one that you configured in the Syslog agent (by default, the agent sets this to local4).\n6. You can set the **Maximum Query String Size** to be the same as you configured.""}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/F5%20Networks/Data%20Connectors/template_F5NetworksAMA.json","true" +"","FalconFriday","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/FalconFriday","falconforcebv1623147592118","falconfriday_content","2021-10-18","","","FalconForce","Partner","https://www.falconforce.nl/en/","","domains","","","","","","","","false" +"","Farsight DNSDB","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Farsight%20DNSDB","","","","","","","","","","","","","","","","","","false" +"feedly_indicators_CL","Feedly","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Feedly","feedlyinc1693853810319","azure-sentinel-solution-feedly","2023-08-01","","","Feedly Inc","Partner","https://feedly.com/i/support/contactUs","","domains","Feedly","Feedly","Feedly","This connector allows you to ingest IoCs from Feedly.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions and the Logs Ingestion API to pull IoCs from Feedly into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": ""Step 1 - Prepare Your Environment"", ""description"": ""The Feedly connector will automatically create:\n\n- **Custom Table**: `feedly_indicators_CL` with the required schema\n- **Data Collection Endpoint (DCE)**: For ingesting data\n- **Data Collection Rule (DCR)**: For processing and routing data\n\nNo manual resource creation is required - everything will be created during deployment!\n\nFor detailed instructions, see: [Migrate from HTTP Data Collector API to Logs Ingestion API](https://learn.microsoft.com/azure/azure-monitor/logs/custom-logs-migrate)""}, {""title"": ""Step 2 - Deploy the Connector"", ""description"": ""The ARM template will automatically:\n\n1. Create a managed identity for the Azure Function\n2. Assign the **Monitoring Metrics Publisher** role to the Function App on the DCR\n3. Configure all necessary permissions for data ingestion\n\nNo manual role assignments are required - everything is handled automatically during deployment!""}, {""title"": ""Step 3 - Get your Feedly API token"", ""description"": ""Go to https://feedly.com/i/team/api and generate a new API token for the connector.""}, {""title"": ""(Optional Step) Securely store credentials in Azure Key Vault"", ""description"": ""Azure Key Vault provides a secure mechanism to store and retrieve secrets. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App by using the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema.""}, {""title"": ""Step 4 - Deploy the connector"", ""description"": ""Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function\n\n>**IMPORTANT:** Before deploying, gather the following information:\n- Feedly API Token and Stream IDs\n\nAll Azure Monitor resources (DCE, DCR, custom table, and role assignments) will be created automatically during deployment."", ""instructions"": [], ""innerSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Feedly connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-Feedly-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the required parameters:\n - **TableName**: Name for the custom table (default: `feedly_indicators_CL`)\n - **FeedlyApiKey**: Your Feedly API token from Step 3\n - **FeedlyStreamIds**: Comma-separated list of Feedly stream IDs\n - **DaysToBackfill**: Number of days to backfill (default: 7)\n\n>**Note**: If using Azure Key Vault secrets, use the `@Microsoft.KeyVault(SecretUri={Security Identifier})` schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Feedly connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""innerSteps"": [{""title"": ""1. Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://github.com/Azure/Azure-Sentinel/raw/refs/heads/master/Solutions/Feedly/Data%20Connectors/FeedlyAzureFunction.zip) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity Bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. FeedlyXXXX).\n\n\te. **Select a runtime:** Choose Python 3.10.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": ""2. Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive):\n\t\t- `DataCollectionEndpoint`: Will be populated automatically after DCE creation\n\t\t- `DcrImmutableId`: Will be populated automatically after DCR creation\n\t\t- `DcrStreamName`: `feedly_indicators_CL`\n\t\t- `FeedlyApiKey`: Your Feedly API token\n\t\t- `FeedlyStreamIds`: Comma-separated Feedly stream IDs\n\t\t- `DaysToBackfill`: Number of days to backfill (e.g., 7)\n\n**Note**: The Function App uses managed identity for authentication to Azure Monitor, so no Azure AD credentials are needed.\n\n>**Note**: Use Azure Key Vault references for sensitive values: `@Microsoft.KeyVault(SecretUri={Security Identifier})`\n\n4. Once all application settings have been entered, click **Save**.""}]}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Azure AD Application Registration"", ""description"": ""An Azure AD App Registration with client credentials and permissions to write to the Data Collection Rule. The application must be granted 'Monitoring Metrics Publisher' role on the DCR.""}, {""name"": ""Data Collection Endpoint and Rule"", ""description"": ""A Data Collection Endpoint (DCE) and Data Collection Rule (DCR) must be created before deploying this connector. [See the documentation to learn more](https://learn.microsoft.com/azure/azure-monitor/logs/custom-logs-migrate).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Feedly/Data%20Connectors/Feedly_API_AzureFunctionApp.json","true" +"CommonSecurityLog","FireEye Network Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/FireEye%20Network%20Security","azuresentinel","azure-sentinel-solution-fireeyenx","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","FireEyeNX","FireEye","[Deprecated] FireEye Network Security (NX) via Legacy Agent","The [FireEye Network Security (NX)](https://www.fireeye.com/products/network-security.html) data connector provides the capability to ingest FireEye Network Security logs into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected 
[**FireEyeNXEvent**](https://aka.ms/sentinel-FireEyeNX-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using FEOS release v9.0"", ""instructions"": []}, {""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Configure FireEye NX to send logs using CEF"", ""description"": ""Complete the following steps to send data using CEF:\n\n2.1. Log into the FireEye appliance with an administrator account\n\n2.2. Click **Settings**\n\n2.3. 
Click **Notifications**\n\nClick **rsyslog**\n\n2.4. Check the **Event type** check box\n\n2.5. Make sure Rsyslog settings are:\n\n- Default format: CEF\n\n- Default delivery: Per event\n\n- Default send as: Alert""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/FireEye%20Network%20Security/Data%20Connectors/Connector_FireEyeNX_CEF.json","true" +"CommonSecurityLog","FireEye Network Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/FireEye%20Network%20Security","azuresentinel","azure-sentinel-solution-fireeyenx","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","FireEyeNXAma","FireEye","[Deprecated] FireEye Network Security (NX) via AMA","The [FireEye Network Security (NX)](https://www.fireeye.com/products/network-security.html) data connector provides the capability to ingest FireEye Network Security logs into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**FireEyeNXEvent**](https://aka.ms/sentinel-FireEyeNX-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Configure FireEye NX to send logs using CEF"", ""description"": ""Complete the following steps to send data using CEF:\n\n2.1. Log into the FireEye appliance with an administrator account\n\n2.2. Click **Settings**\n\n2.3. Click **Notifications**\n\nClick **rsyslog**\n\n2.4. Check the **Event type** check box\n\n2.5. Make sure Rsyslog settings are:\n\n- Default format: CEF\n\n- Default delivery: Per event\n\n- Default send as: Alert"", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/FireEye%20Network%20Security/Data%20Connectors/template_FireEyeNX_CEFAMA.json","true" +"Firework_CL","Flare","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Flare","flaresystmesinc1617114736428","flare-systems-firework-sentinel","2021-10-20","","","Flare","Partner","https://flare.io/company/contact/","","domains","Flare","Flare","Flare","[Flare](https://flare.systems/platform/) connector allows you to receive data and intelligence from Flare on Microsoft Sentinel.","[{""title"": ""1. Creating an Alert Channel for Microsoft Sentinel"", ""description"": """", ""innerSteps"": [{""description"": ""As an organization administrator, authenticate on [Flare](https://app.flare.systems) and access the [team page](https://app.flare.systems#/team) to create a new alert channel.""}, {""description"": ""Click on 'Create a new alert channel' and select 'Microsoft Sentinel'. Enter your Shared Key And WorkspaceID. Save the Alert Channel. \n For more help and details, see our [Azure configuration documentation](https://docs.microsoft.com/azure/sentinel/connect-data-sources)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID"", ""value"": ""{0}""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary key"", ""value"": ""{0} ""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. 
Associating your alert channel to an alert feed"", ""innerSteps"": [{""description"": ""At this point, you may configure alerts to be sent to Microsoft Sentinel the same way that you would configure regular email alerts.""}, {""description"": ""For a more detailed guide, refer to the Flare documentation.""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Required Flare permissions"", ""description"": ""only Flare organization administrators may configure the Microsoft Sentinel integration.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Flare/Data%20Connectors/Connector_REST_API_FlareSystemsFirework.json","true" +"CommonSecurityLog","Forcepoint CASB","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20CASB","microsoftsentinelcommunity","azure-sentinel-solution-forcepoint-casb","2022-05-19","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ForcepointCasb","Forcepoint CASB","[Deprecated] Forcepoint CASB via Legacy Agent","The Forcepoint CASB (Cloud Access Security Broker) Connector allows you to automatically export CASB logs and events into Microsoft Sentinel in real-time. 
This enriches visibility into user activities across locations and cloud applications, enables further correlation with data from Azure workloads and other feeds, and improves monitoring capability with Workbooks inside Microsoft Sentinel.","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel. This machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version \n \n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}, {""title"": ""5. Forcepoint integration installation guide "", ""description"": ""To complete the installation of this Forcepoint product integration, follow the guide linked below.\n\n[Installation Guide >](https://frcpnt.com/casb-sentinel)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20CASB/Data%20Connectors/Forcepoint%20CASB.json","true" +"CommonSecurityLog","Forcepoint CASB","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20CASB","microsoftsentinelcommunity","azure-sentinel-solution-forcepoint-casb","2022-05-19","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ForcepointCasbAma","Forcepoint CASB","[Deprecated] Forcepoint CASB via AMA","The Forcepoint CASB (Cloud Access Security Broker) Connector allows you to automatically export CASB logs and events into Microsoft Sentinel in real-time. This enriches visibility into user activities across locations and cloud applications, enables further correlation with data from Azure workloads and other feeds, and improves monitoring capability with Workbooks inside Microsoft Sentinel.","[{""title"": """", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine.""}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}, {""title"": ""3. 
Forcepoint integration installation guide "", ""description"": ""To complete the installation of this Forcepoint product integration, follow the guide linked below.\n\n[Installation Guide >](https://frcpnt.com/casb-sentinel)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20CASB/Data%20Connectors/template_Forcepoint%20CASBAMA.json","true" +"CommonSecurityLog","Forcepoint CSG","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20CSG","microsoftsentinelcommunity","azure-sentinel-solution-forcepoint-csg","2022-05-10","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ForcepointCSG","Forcepoint","[Deprecated] Forcepoint CSG via Legacy Agent","Forcepoint Cloud Security Gateway is a converged cloud security service that provides visibility, control, and threat protection for users and data, wherever they are. For more information visit: https://www.forcepoint.com/product/cloud-security-gateway","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""This integration requires the Linux Syslog agent to collect your Forcepoint Cloud Security Gateway Web/Email logs on port 514 TCP as Common Event Format (CEF) and forward them to Microsoft Sentinel."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Your Data Connector Syslog Agent Installation Command is:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""2. 
Implementation options"", ""description"": ""The integration is made available with two implementations options."", ""innerSteps"": [{""title"": ""2.1 Docker Implementation"", ""description"": ""Leverages docker images where the integration component is already installed with all necessary dependencies.\n\nFollow the instructions provided in the Integration Guide linked below.\n\n[Integration Guide >](https://frcpnt.com/csg-sentinel)""}, {""title"": ""2.2 Traditional Implementation"", ""description"": ""Requires the manual deployment of the integration component inside a clean Linux machine.\n\nFollow the instructions provided in the Integration Guide linked below.\n\n[Integration Guide >](https://frcpnt.com/csg-sentinel)""}]}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version \n \n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF).""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20CSG/Data%20Connectors/ForcepointCloudSecurityGateway.json","true" +"CommonSecurityLog","Forcepoint CSG","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20CSG","microsoftsentinelcommunity","azure-sentinel-solution-forcepoint-csg","2022-05-10","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ForcepointCSGAma","Forcepoint","[Deprecated] Forcepoint CSG via AMA","Forcepoint Cloud Security Gateway is a converged cloud security service that provides visibility, control, and threat protection for users and data, wherever they are. For more information visit: https://www.forcepoint.com/product/cloud-security-gateway","[{""title"": """", ""description"": """", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. 
Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine.""}, {""title"": ""Step B. Implementation options"", ""description"": ""The integration is made available with two implementations options."", ""innerSteps"": [{""title"": ""1. Docker Implementation"", ""description"": ""Leverages docker images where the integration component is already installed with all necessary dependencies.\n\nFollow the instructions provided in the Integration Guide linked below.\n\n[Integration Guide >](https://frcpnt.com/csg-sentinel)""}, {""title"": ""2. Traditional Implementation"", ""description"": ""Requires the manual deployment of the integration component inside a clean Linux machine.\n\nFollow the instructions provided in the Integration Guide linked below.\n\n[Integration Guide >](https://frcpnt.com/csg-sentinel)""}]}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. 
Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF).""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20CSG/Data%20Connectors/template_ForcepointCloudSecurityGatewayAMA.json","true" +"ForcepointDLPEvents_CL","Forcepoint DLP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20DLP","microsoftsentinelcommunity","azure-sentinel-solution-forcepoint-dlp","2022-05-09","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","Forcepoint_DLP","Forcepoint","Forcepoint DLP","The Forcepoint DLP (Data Loss Prevention) connector allows you to automatically export DLP incident data from Forcepoint DLP into Microsoft Sentinel in real-time. 
This enriches visibility into user activities and data loss incidents, enables further correlation with data from Azure workloads and other feeds, and improves monitoring capability with Workbooks inside Microsoft Sentinel.","[{""title"": """", ""description"": ""Follow step by step instructions in the [Forcepoint DLP documentation for Microsoft Sentinel](https://frcpnt.com/dlp-sentinel) to configure this connector."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20DLP/Data%20Connectors/Forcepoint%20DLP.json","true" +"CommonSecurityLog","Forcepoint NGFW","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20NGFW","microsoftsentinelcommunity","azure-sentinel-solution-forcepoint-ngfw","2022-05-25","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ForcepointNgfw","Forcepoint","[Deprecated] Forcepoint NGFW via Legacy Agent","The Forcepoint NGFW (Next Generation Firewall) connector allows you to automatically export user-defined Forcepoint NGFW logs into Microsoft Sentinel in real-time. This enriches visibility into user activities recorded by NGFW, enables further correlation with data from Azure workloads and other feeds, and improves monitoring capability with Workbooks inside Microsoft Sentinel.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python - version \n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}, {""title"": ""5. Forcepoint integration installation guide "", ""description"": ""To complete the installation of this Forcepoint product integration, follow the guide linked below.\n\n[Installation Guide >](https://frcpnt.com/ngfw-sentinel)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20NGFW/Data%20Connectors/FORCEPOINT_NGFW.json","true" +"CommonSecurityLog","Forcepoint NGFW","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20NGFW","microsoftsentinelcommunity","azure-sentinel-solution-forcepoint-ngfw","2022-05-25","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ForcepointNgfwAma","Forcepoint","[Deprecated] Forcepoint NGFW via AMA","The Forcepoint NGFW (Next Generation Firewall) connector allows you to automatically export user-defined Forcepoint NGFW logs into Microsoft Sentinel in real-time. This enriches visibility into user activities recorded by NGFW, enables further correlation with data from Azure workloads and other feeds, and improves monitoring capability with Workbooks inside Microsoft Sentinel.","[{""title"": """", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine.""}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}, {""title"": ""3. Forcepoint integration installation guide "", ""description"": ""To complete the installation of this Forcepoint product integration, follow the guide linked below.\n\n[Installation Guide >](https://frcpnt.com/ngfw-sentinel)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forcepoint%20NGFW/Data%20Connectors/template_FORCEPOINT_NGFWAMA.json","true" +"Syslog","Forescout (Legacy)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forescout%20%28Legacy%29","azuresentinel","azure-sentinel-solution-forescout","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Forescout","Forescout","Forescout","The [Forescout](https://www.forescout.com/) data connector provides the capability to ingest [Forescout events](https://docs.forescout.com/bundle/syslog-3-6-1-h/page/syslog-3-6-1-h.How-to-Work-with-the-Syslog-Plugin.html) into Microsoft Sentinel. Refer to [Forescout documentation](https://docs.forescout.com/bundle/syslog-msg-3-6-tn/page/syslog-msg-3-6-tn.About-Syslog-Messages-in-Forescout.html) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**ForescoutEvent**](https://aka.ms/sentinel-forescout-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using Forescout Syslog Plugin version: v3.6"", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server where the Forescout logs are forwarded.\n\n> Logs from Forescout Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n 2. 
Select **Apply below configuration to my machines** and select the facilities and severities.\n 3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Configure Forescout event forwarding"", ""description"": ""Follow the configuration steps below to get Forescout logs into Microsoft Sentinel.\n1. [Select an Appliance to Configure.](https://docs.forescout.com/bundle/syslog-3-6-1-h/page/syslog-3-6-1-h.Select-an-Appliance-to-Configure.html)\n2. [Follow these instructions](https://docs.forescout.com/bundle/syslog-3-6-1-h/page/syslog-3-6-1-h.Send-Events-To-Tab.html#pID0E0CE0HA) to forward alerts from the Forescout platform to a syslog server.\n3. [Configure](https://docs.forescout.com/bundle/syslog-3-6-1-h/page/syslog-3-6-1-h.Syslog-Triggers.html) the settings in the Syslog Triggers tab.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forescout%20%28Legacy%29/Data%20Connectors/Forescout_syslog.json","true" +"ForescoutOtAlert_CL","Forescout eyeInspect for OT Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forescout%20eyeInspect%20for%20OT%20Security","forescout","azure-sentinel-eyeinspectotsecurity","2025-07-10","","","Forescout Technologies","Partner","https://www.forescout.com/support","","domains","Forescout_eyeInspect_for_OT_Security","Forescout","Forescout eyeInspect for OT Security","Forescout eyeInspect for OT Security connector allows you to connect Asset/Alert information from Forescout eyeInspect OT platform with Microsoft Sentinel, to view and analyze data using Log Analytics Tables and Workbooks. 
This gives you more insight into OT organization network and improves security operation capabilities.","[{""title"": ""Forescout eyeInspect OT Microsoft Sentinel Integration"", ""description"": ""Instructions on how to configure Forescout eyeInspect Microsoft Sentinel Integration are provided at Forescout eyeInspect Documentation Portal"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forescout%20eyeInspect%20for%20OT%20Security/Data%20Connectors/Forescout%20eyeInspect%20for%20OT%20Security.json","true" +"ForescoutOtAsset_CL","Forescout eyeInspect for OT Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forescout%20eyeInspect%20for%20OT%20Security","forescout","azure-sentinel-eyeinspectotsecurity","2025-07-10","","","Forescout Technologies","Partner","https://www.forescout.com/support","","domains","Forescout_eyeInspect_for_OT_Security","Forescout","Forescout eyeInspect for OT Security","Forescout eyeInspect for OT Security connector allows you to connect Asset/Alert information from Forescout eyeInspect OT platform with Microsoft Sentinel, to view and analyze data using Log Analytics Tables and Workbooks. 
This gives you more insight into OT organization network and improves security operation capabilities.","[{""title"": ""Forescout eyeInspect OT Microsoft Sentinel Integration"", ""description"": ""Instructions on how to configure Forescout eyeInspect Microsoft Sentinel Integration are provided at Forescout eyeInspect Documentation Portal"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Forescout%20eyeInspect%20for%20OT%20Security/Data%20Connectors/Forescout%20eyeInspect%20for%20OT%20Security.json","true" +"ForescoutComplianceStatus_CL","ForescoutHostPropertyMonitor","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ForescoutHostPropertyMonitor","forescout","azure-sentinel-solution-forescout","2022-06-28","","","Forescout Technologies","Partner","https://www.forescout.com/support","","domains","ForescoutHostPropertyMonitor","Forescout","Forescout Host Property Monitor","The Forescout Host Property Monitor connector allows you to connect host/policy/compliance properties from Forescout platform with Microsoft Sentinel, to view, create custom incidents, and improve investigation. 
This gives you more insight into your organization network and improves your security operation capabilities.","[{""title"": """", ""description"": ""Instructions on how to configure Forescout Microsoft Sentinel plugin are provided at Forescout Documentation Portal (https://docs.forescout.com/bundle/microsoft-sentinel-module-v2-0-0-h)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Forescout Plugin requirement"", ""description"": ""Please make sure Forescout Microsoft Sentinel plugin is running on Forescout platform""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ForescoutHostPropertyMonitor/Data%20Connectors/ForescoutHostPropertyMonitor.json","true" +"ForescoutHostProperties_CL","ForescoutHostPropertyMonitor","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ForescoutHostPropertyMonitor","forescout","azure-sentinel-solution-forescout","2022-06-28","","","Forescout Technologies","Partner","https://www.forescout.com/support","","domains","ForescoutHostPropertyMonitor","Forescout","Forescout Host Property Monitor","The Forescout Host Property Monitor connector allows you to connect host/policy/compliance properties from Forescout platform with Microsoft Sentinel, to view, create custom incidents, and improve investigation. 
This gives you more insight into your organization network and improves your security operation capabilities.","[{""title"": """", ""description"": ""Instructions on how to configure Forescout Microsoft Sentinel plugin are provided at Forescout Documentation Portal (https://docs.forescout.com/bundle/microsoft-sentinel-module-v2-0-0-h)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Forescout Plugin requirement"", ""description"": ""Please make sure Forescout Microsoft Sentinel plugin is running on Forescout platform""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ForescoutHostPropertyMonitor/Data%20Connectors/ForescoutHostPropertyMonitor.json","true" +"ForescoutPolicyStatus_CL","ForescoutHostPropertyMonitor","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ForescoutHostPropertyMonitor","forescout","azure-sentinel-solution-forescout","2022-06-28","","","Forescout Technologies","Partner","https://www.forescout.com/support","","domains","ForescoutHostPropertyMonitor","Forescout","Forescout Host Property Monitor","The Forescout Host Property Monitor connector allows you to connect host/policy/compliance properties from Forescout platform with Microsoft Sentinel, to view, create custom incidents, and improve investigation. 
This gives you more insight into your organization network and improves your security operation capabilities.","[{""title"": """", ""description"": ""Instructions on how to configure Forescout Microsoft Sentinel plugin are provided at Forescout Documentation Portal (https://docs.forescout.com/bundle/microsoft-sentinel-module-v2-0-0-h)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Forescout Plugin requirement"", ""description"": ""Please make sure Forescout Microsoft Sentinel plugin is running on Forescout platform""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ForescoutHostPropertyMonitor/Data%20Connectors/ForescoutHostPropertyMonitor.json","true" +"CommonSecurityLog","ForgeRock Common Audit for CEF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ForgeRock%20Common%20Audit%20for%20CEF","publisherid_test","offerid_test","2022-05-04","","","Forgerock","Partner","https://www.forgerock.com/support","","domains","ForgeRock","ForgeRock Inc","[Deprecated] ForgeRock Identity Platform","The ForgeRock Identity Platform provides a single common auditing framework. Extract and aggregate log data across the entire platform with common audit (CAUD) event handlers and unique IDs so that it can be tracked holistically. Open and extensible, you can leverage audit logging and reporting capabilities for integration with Microsoft Sentinel via this CAUD for CEF connector.","[{""title"": ""Configuration for the ForgeRock Common Audit (CAUD) for Microsoft Sentinel"", ""description"": ""In ForgeRock, install and configure this Common Audit (CAUD) for Microsoft Sentinel per the documentation at https://github.com/javaservlets/SentinelAuditEventHandler. Next, in Azure, follow the below CEF steps.""}, {""title"": ""\n\n\n1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ForgeRock%20Common%20Audit%20for%20CEF/Data%20Connectors/ForgeRock_CEF.json","true" +"CommonSecurityLog","Fortinet FortiGate Next-Generation Firewall connector for Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiGate%20Next-Generation%20Firewall%20connector%20for%20Microsoft%20Sentinel","azuresentinel","azure-sentinel-solution-fortinetfortigate","2021-08-13","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Fortinet","Fortinet","[Deprecated] Fortinet via Legacy Agent","The Fortinet firewall connector allows you to easily connect your Fortinet logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python --version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py &&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Fortinet logs to Syslog agent"", ""description"": ""Set your Fortinet to send Syslog messages in CEF format to the proxy machine. 
Make sure you to send the logs to port 514 TCP on the machine\u2019s IP address.\n\n\nCopy the CLI commands below and:\n- Replace \""server <ip address>\"" with the Syslog agent's IP address.\n- Set the \""<facility_name>\"" to use the facility you configured in the Syslog agent (by default, the agent sets this to local4).\n- Set the Syslog port to 514, the port your agent uses.\n- To enable CEF format in early FortiOS versions, you may need to run the command \""set csv disable\"".\n\nFor more information, go to the [Fortinet Document Library](https://aka.ms/asi-syslog-fortinet-fortinetdocumentlibrary), choose your version, and use the \""Handbook\"" and \""Log Message Reference\"" PDFs.\n\n[Learn more >](https://aka.ms/CEF-Fortinet)"", ""instructions"": [{""parameters"": {""label"": ""Set up the connection using the CLI to run the following commands:"", ""value"": ""config log syslogd setting\n set status enable\nset format cef\nset port 514\nset server \nend"", ""rows"": 8}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python --version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py &&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiGate%20Next-Generation%20Firewall%20connector%20for%20Microsoft%20Sentinel/Data%20Connectors/Fortinet-FortiGate.json","true" +"CommonSecurityLog","Fortinet FortiGate Next-Generation Firewall connector for Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiGate%20Next-Generation%20Firewall%20connector%20for%20Microsoft%20Sentinel","azuresentinel","azure-sentinel-solution-fortinetfortigate","2021-08-13","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","FortinetAma","Fortinet","[Deprecated] Fortinet via AMA","The Fortinet firewall connector allows you to easily connect your Fortinet logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": """", ""description"": """", ""instructions"": [{""parameters"": {""title"": ""1. 
Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine""}, {""title"": ""Step B. Forward Fortinet logs to Syslog agent"", ""description"": ""Set your Fortinet to send Syslog messages in CEF format to the proxy machine. 
Make sure you to send the logs to port 514 TCP on the machine\u2019s IP address.\n\n\nCopy the CLI commands below and:\n- Replace \""server <ip address>\"" with the Syslog agent's IP address.\n- Set the \""<facility_name>\"" to use the facility you configured in the Syslog agent (by default, the agent sets this to local4).\n- Set the Syslog port to 514, the port your agent uses.\n- To enable CEF format in early FortiOS versions, you may need to run the command \""set csv disable\"".\n\nFor more information, go to the [Fortinet Document Library](https://aka.ms/asi-syslog-fortinet-fortinetdocumentlibrary), choose your version, and use the \""Handbook\"" and \""Log Message Reference\"" PDFs.\n\n[Learn more >](https://aka.ms/CEF-Fortinet)"", ""instructions"": [{""parameters"": {""label"": ""Set up the connection using the CLI to run the following commands:"", ""value"": ""config log syslogd setting\n set status enable\nset format cef\nset port 514\nset server \nend"", ""rows"": 8}, ""type"": ""CopyableLabel""}]}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiGate%20Next-Generation%20Firewall%20connector%20for%20Microsoft%20Sentinel/Data%20Connectors/template_Fortinet-FortiGateAma.json","true" +"FncEventsDetections_CL","Fortinet FortiNDR Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiNDR%20Cloud","fortinet","fortindrcloud-sentinel","2024-01-15","","","Fortinet","Partner","https://www.fortinet.com/support","","domains","FortinetFortiNdrCloudDataConnector","Fortinet","Fortinet FortiNDR Cloud","The Fortinet FortiNDR Cloud data connector provides the capability to ingest [Fortinet FortiNDR Cloud](https://docs.fortinet.com/product/fortindr-cloud) data into Microsoft Sentinel using the FortiNDR Cloud API","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the FortiNDR Cloud API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/Fortinet%20FortiNDR%20Cloud/Parsers/Fortinet_FortiNDR_Cloud.md) to create the Kusto function alias **Fortinet_FortiNDR_Cloud**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Fortinet FortiNDR Cloud Logs Collection**\n\nThe provider should provide or link to detailed steps to configure the 'PROVIDER NAME APPLICATION NAME' API endpoint so that the Azure Function can authenticate to it successfully, get its authorization key or token, and pull the appliance's logs into Microsoft Sentinel.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Fortinet FortiNDR Cloud connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the FortiNDR Cloud API credentials (available in FortiNDR Cloud account management), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""**Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the Fortinet FortiNDR Cloud connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-FortinetFortiNDR-azuredeploy)\n2. 
Select the preferred **Subscription**, **Resource Group** and **Location** (make sure to use the same location as your Resource Group, and that the location supports Flex Consumption). \n3. Enter the **Workspace ID**, **Workspace Key**, **AwsAccessKeyId**, **AwsSecretAccessKey**, and/or Other required fields. \n4. Click **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""MetaStream Credentials"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **FortiNDR Cloud Account Code** are required to retrieve event data.""}, {""name"": ""API Credentials"", ""description"": ""**FortiNDR Cloud API Token**, **FortiNDR Cloud Account UUID** are required to retrieve detection data.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiNDR%20Cloud/Data%20Connectors/FortinetFortiNdrCloud_API_AzureFunctionApp.json","true" +"FncEventsObservation_CL","Fortinet FortiNDR Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiNDR%20Cloud","fortinet","fortindrcloud-sentinel","2024-01-15","","","Fortinet","Partner","https://www.fortinet.com/support","","domains","FortinetFortiNdrCloudDataConnector","Fortinet","Fortinet FortiNDR Cloud","The Fortinet FortiNDR Cloud data connector provides the capability to ingest [Fortinet FortiNDR Cloud](https://docs.fortinet.com/product/fortindr-cloud) data into Microsoft Sentinel using the FortiNDR Cloud API","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the FortiNDR Cloud API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/Fortinet%20FortiNDR%20Cloud/Parsers/Fortinet_FortiNDR_Cloud.md) to create the Kusto function alias **Fortinet_FortiNDR_Cloud**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Fortinet FortiNDR Cloud Logs Collection**\n\nThe provider should provide or link to detailed steps to configure the 'PROVIDER NAME APPLICATION NAME' API endpoint so that the Azure Function can authenticate to it successfully, get its authorization key or token, and pull the appliance's logs into Microsoft Sentinel.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Fortinet FortiNDR Cloud connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the FortiNDR Cloud API credentials (available in FortiNDR Cloud account management), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""**Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the Fortinet FortiNDR Cloud connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-FortinetFortiNDR-azuredeploy)\n2. 
Select the preferred **Subscription**, **Resource Group** and **Location** (make sure to use the same location as your Resource Group, and that the location supports Flex Consumption). \n3. Enter the **Workspace ID**, **Workspace Key**, **AwsAccessKeyId**, **AwsSecretAccessKey**, and/or Other required fields. \n4. Click **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""MetaStream Credentials"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **FortiNDR Cloud Account Code** are required to retrieve event data.""}, {""name"": ""API Credentials"", ""description"": ""**FortiNDR Cloud API Token**, **FortiNDR Cloud Account UUID** are required to retrieve detection data.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiNDR%20Cloud/Data%20Connectors/FortinetFortiNdrCloud_API_AzureFunctionApp.json","true" +"FncEventsSuricata_CL","Fortinet FortiNDR Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiNDR%20Cloud","fortinet","fortindrcloud-sentinel","2024-01-15","","","Fortinet","Partner","https://www.fortinet.com/support","","domains","FortinetFortiNdrCloudDataConnector","Fortinet","Fortinet FortiNDR Cloud","The Fortinet FortiNDR Cloud data connector provides the capability to ingest [Fortinet FortiNDR Cloud](https://docs.fortinet.com/product/fortindr-cloud) data into Microsoft Sentinel using the FortiNDR Cloud API","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the FortiNDR Cloud API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses a parser based on a Kusto Function to normalize fields. [Follow these steps](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/Fortinet%20FortiNDR%20Cloud/Parsers/Fortinet_FortiNDR_Cloud.md) to create the Kusto function alias **Fortinet_FortiNDR_Cloud**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Fortinet FortiNDR Cloud Logs Collection**\n\nThe provider should provide or link to detailed steps to configure the 'PROVIDER NAME APPLICATION NAME' API endpoint so that the Azure Function can authenticate to it successfully, get its authorization key or token, and pull the appliance's logs into Microsoft Sentinel.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Fortinet FortiNDR Cloud connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the FortiNDR Cloud API credentials (available in FortiNDR Cloud account management), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""**Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the Fortinet FortiNDR Cloud connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-FortinetFortiNDR-azuredeploy)\n2. 
Select the preferred **Subscription**, **Resource Group** and **Location** (make sure to use the same location as your Resource Group, and that the location supports Flex Consumption). \n3. Enter the **Workspace ID**, **Workspace Key**, **AwsAccessKeyId**, **AwsSecretAccessKey**, and/or Other required fields. \n4. Click **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""MetaStream Credentials"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **FortiNDR Cloud Account Code** are required to retrieve event data.""}, {""name"": ""API Credentials"", ""description"": ""**FortiNDR Cloud API Token**, **FortiNDR Cloud Account UUID** are required to retrieve detection data.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiNDR%20Cloud/Data%20Connectors/FortinetFortiNdrCloud_API_AzureFunctionApp.json","true" +"CommonSecurityLog","Fortinet FortiWeb Cloud WAF-as-a-Service connector for Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiWeb%20Cloud%20WAF-as-a-Service%20connector%20for%20Microsoft%20Sentinel","azuresentinel","azure-sentinel-solution-fortiwebcloud","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","FortinetFortiWeb","Microsoft","[Deprecated] Fortinet FortiWeb Web Application Firewall via Legacy Agent","The [fortiweb](https://www.fortinet.com/products/web-application-firewall/fortiweb) data connector provides the capability to ingest Threat Analytics and events into Microsoft Sentinel.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiWeb%20Cloud%20WAF-as-a-Service%20connector%20for%20Microsoft%20Sentinel/Data%20Connectors/Fortiweb.json","true" +"CommonSecurityLog","Fortinet FortiWeb Cloud WAF-as-a-Service connector for Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiWeb%20Cloud%20WAF-as-a-Service%20connector%20for%20Microsoft%20Sentinel","azuresentinel","azure-sentinel-solution-fortiwebcloud","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","FortinetFortiWebAma","Microsoft","Fortinet FortiWeb Web Application Firewall via AMA","The [fortiweb](https://www.fortinet.com/products/web-application-firewall/fortiweb) data connector provides the capability to ingest Threat Analytics and events into Microsoft Sentinel.","[{""title"": """", ""description"": """", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine""}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Fortinet%20FortiWeb%20Cloud%20WAF-as-a-Service%20connector%20for%20Microsoft%20Sentinel/Data%20Connectors/template_FortiwebAma.json","true" +"","GDPR Compliance & Data Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GDPR%20Compliance%20%26%20Data%20Security","azuresentinel","azure-sentinel-solution-gdpr-compliance","2025-10-08","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"Garrison_ULTRARemoteLogs_CL","Garrison ULTRA","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Garrison%20ULTRA","garrisontechnologyltd1725375696148","microsoft-sentinel-solution-garrison-ultra","2024-10-04","","","Garrison","Partner","https://support.ultra.garrison.com","","domains","GarrisonULTRARemoteLogs","Garrison","Garrison ULTRA Remote Logs","The [Garrison ULTRA](https://www.garrison.com/en/garrison-ultra-cloud-platform) Remote Logs connector allows you to ingest Garrison ULTRA Remote Logs into Microsoft Sentinel.","[{""title"": ""Deployment - Azure Resource Manager (ARM) Template"", ""description"": ""These steps outline the automated deployment of the Garrison ULTRA Remote Logs data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Frefs%2Fheads%2Fmaster%2FSolutions%2FGarrison%2520ULTRA%2FData%2520Connectors%2FGarrisonULTRARemoteLogs%2Fazuredeploy_DataCollectionResources.json) \t\t\t\n2. Provide the required details such as Resource Group, Microsoft Sentinel Workspace and ingestion configurations \n> **NOTE:** It is recommended to create a new Resource Group for deployment of these resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Garrison ULTRA"", ""description"": ""To use this data connector you must have an active [Garrison ULTRA](https://www.garrison.com/en/garrison-ultra-cloud-platform) license.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Garrison%20ULTRA/Data%20Connectors/GarrisonULTRARemoteLogs/GarrisonULTRARemoteLogs_ConnectorUI.json","true" +"Gigamon_CL","Gigamon Connector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Gigamon%20Connector","gigamon-inc","microsoft-sentinel-solution-gigamon","","","","Gigamon","Partner","https://www.gigamon.com/","","domains","GigamonDataConnector","Gigamon","Gigamon AMX Data Connector","Use this data connector to integrate with Gigamon Application Metadata Exporter (AMX) and get data sent directly to Microsoft Sentinel. ","[{""title"": ""Gigamon Data Connector"", ""description"": ""1. Application Metadata Exporter (AMX) application converts the output from the Application Metadata Intelligence (AMI) in CEF format into JSON format and sends it to the cloud tools and Kafka.\n 2. The AMX application can be deployed only on a V Series Node and can be connected to Application Metadata Intelligence running on a physical node or a virtual machine.\n 3. The AMX application and the AMI are managed by GigaVUE-FM. 
This application is supported on VMware ESXi, VMware NSX-T, AWS and Azure.\n "", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Gigamon%20Connector/Data%20Connectors/Connector_Analytics_Gigamon.json","true" +"GitHubAuditLogsV2_CL","GitHub","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GitHub","microsoftcorporation1622712991604","sentinel4github","2021-10-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GitHubAuditDefinitionV2","Microsoft","GitHub Enterprise Audit Log (via Codeless Connector Framework) (Preview)","The GitHub audit log connector provides the capability to ingest GitHub logs into Microsoft Sentinel. By connecting GitHub audit logs into Microsoft Sentinel, you can view this data in workbooks, use it to create custom alerts, and improve your investigation process.

**Note:** If you intended to ingest GitHub subscribed events into Microsoft Sentinel, please refer to GitHub (using Webhooks) Connector from ""**Data Connectors**"" gallery.","[{""title"": ""Connect the GitHub Enterprise-level Audit Log to Microsoft Sentinel"", ""description"": ""Enable GitHub audit logs. \n Follow [this guide](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens#creating-a-personal-access-token-classic) to create or find your personal access token."", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Github Enterprise API URL"", ""columnValue"": ""properties.addOnAttributes.ApiUrl""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Enterprise"", ""title"": ""Add Enterprise"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""parameters"": {""content"": ""Enter your Github Enterprise API URL and API key. 
Github Enterprise API URL formats:\n* `https://api.github.com/enterprises/{enterprise}`\n* `https://api.{subdomain}.ghe.com/enterprises/{enterprise}`""}, ""type"": ""Markdown""}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Github Enterprise API URL"", ""placeholder"": ""Your Github Enterprise API URL"", ""type"": ""text"", ""name"": ""ApiUrl""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""Enter API Key"", ""type"": ""password"", ""name"": ""apikey""}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""GitHub API personal access token"", ""description"": ""To enable polling for the Enterprise audit log, ensure the authenticated user is an Enterprise admin and has a GitHub personal access token (classic) with the `read:audit_log` scope.""}, {""name"": ""GitHub Enterprise type"", ""description"": ""This connector will only function with GitHub Enterprise Cloud; it will not support GitHub Enterprise Server.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GitHub/Data%20Connectors/GitHubAuditLogs_CCF/GitHubAuditLogs_ConnectorDefinition.json","true" +"GitHubAuditLogPolling_CL","GitHub","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GitHub","microsoftcorporation1622712991604","sentinel4github","2021-10-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GitHubEcAuditLogPolling","GitHub","[Deprecated] GitHub Enterprise Audit Log","The GitHub audit log connector provides the capability to ingest GitHub logs into Microsoft Sentinel. 
By connecting GitHub audit logs into Microsoft Sentinel, you can view this data in workbooks, use it to create custom alerts, and improve your investigation process.

**Note:** If you intended to ingest GitHub subscribed events into Microsoft Sentinel, please refer to GitHub (using Webhooks) Connector from ""**Data Connectors**"" gallery.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": ""Connect the GitHub Enterprise Organization-level Audit Log to Microsoft Sentinel"", ""description"": ""Enable GitHub audit logs. \n Follow [this guide](https://docs.github.com/en/github/authenticating-to-github/keeping-your-account-and-data-secure/creating-a-personal-access-token) to create or find your personal access token."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Organization Name"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{placeHolder1}}"", ""placeHolderValue"": """"}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""GitHub API personal access token"", ""description"": ""You need a GitHub personal access token to enable polling for the organization audit log. You may use either a classic token with 'read:org' scope OR a fine-grained token with 'Administration: Read-only' scope.""}, {""name"": ""GitHub Enterprise type"", ""description"": ""This connector will only function with GitHub Enterprise Cloud; it will not support GitHub Enterprise Server. 
""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GitHub/Data%20Connectors/azuredeploy_GitHub_native_poller_connector.json","true" +"githubscanaudit_CL","GitHub","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GitHub","microsoftcorporation1622712991604","sentinel4github","2021-10-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GitHubWebhook","Microsoft","GitHub (using Webhooks)","The [GitHub](https://www.github.com) webhook data connector provides the capability to ingest GitHub subscribed events into Microsoft Sentinel using [GitHub webhook events](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads). The connector provides ability to get events into Microsoft Sentinel which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

**Note:** If you are intended to ingest Github Audit logs, Please refer to GitHub Enterprise Audit Log Connector from ""**Data Connectors**"" gallery.","[{""title"": """", ""description"": "">**NOTE:** This connector has been built on http trigger based Azure Function. And it provides an endpoint to which github will be connected through it's webhook capability and posts the subscribed events into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Github Webhook connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the GitHub data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-GitHubwebhookAPI-azuredeploy)\n2. 
Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region and deploy. \n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the GitHub webhook data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentinel-GitHubWebhookAPI-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration. \n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select ** New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional) - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""**Post Deployment steps**\n\n""}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""STEP 1 - To get the Azure Function url"", ""description"": "" 1. Go to Azure function Overview page and Click on \""Functions\"" in the left blade.\n 2. Click on the function called \""GithubwebhookConnector\"".\n 3. Go to \""GetFunctionurl\"" and copy the function url.""}, {""title"": ""STEP 2 - Configure Webhook to Github Organization"", ""description"": ""1. Go to [GitHub](https://www.github.com) and open your account and click on \""Your Organizations.\""\n 2. Click on Settings.\n 3. Click on \""Webhooks\"" and enter the function app url which was copied from above STEP 1 under payload URL textbox. \n 4. Choose content type as \""application/json\"". \n 5. Subscribe for events and Click on \""Add Webhook\""""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""*Now we are done with the github Webhook configuration. 
Once the github events triggered and after the delay of 20 to 30 mins (As there will be a delay for LogAnalytics to spin up the resources for the first time), you should be able to see all the transactional events from the Github into LogAnalytics workspace table called \""githubscanaudit_CL\"".*\n\n For more details, Click [here](https://aka.ms/sentinel-gitHubwebhooksteps)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GitHub/Data%20Connectors/GithubWebhook/GithubWebhook_API_FunctionApp.json","true" +"Syslog","GitLab","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GitLab","azuresentinel","azure-sentinel-solution-gitlab","2022-04-27","2022-06-27","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GitLab","Microsoft","[Deprecated] GitLab","The [GitLab](https://about.gitlab.com/solutions/devops-platform/) connector allows you to easily connect your GitLab (GitLab Enterprise Edition - Standalone) logs with Microsoft Sentinel. This gives you more security insight into your organization's DevOps pipelines.","[{""title"": ""Configuration"", ""description"": "">This data connector depends on three parsers based on a Kusto Function to work as expected [**GitLab Access Logs**](https://aka.ms/sentinel-GitLabAccess-parser), [**GitLab Audit Logs**](https://aka.ms/sentinel-GitLabAudit-parser) and [**GitLab Application Logs**](https://aka.ms/sentinel-GitLabApp-parser) which are deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n2. Select **Apply below configuration to my machines** and select the facilities and severities.\n3. 
Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GitLab/Data%20Connectors/Connector_Syslog_GitLab.json","true" +"","Global Secure Access","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Global%20Secure%20Access","azuresentinel","azure-sentinel-solution-globalsecureaccess","2024-04-08","","","Microsoft Corporation","Microsoft","https://learn.microsoft.com/en-us/entra/global-secure-access/overview-what-is-global-secure-access","","domains","","","","","","","","false" +"ApigeeX_CL","Google Apigee","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Apigee","azuresentinel","azure-sentinel-solution-googleapigeex","2021-10-28","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ApigeeXDataConnector","Google","[DEPRECATED] Google ApigeeX","The [Google ApigeeX](https://cloud.google.com/apigee/docs) data connector provides the capability to ingest ApigeeX audit logs into Microsoft Sentinel using the GCP Logging API. Refer to [GCP Logging API documentation](https://cloud.google.com/logging/docs/reference/v2/rest) for more information.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the GCP API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**ApigeeX**](https://aka.ms/sentinel-ApigeeXDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuring GCP and obtaining credentials**\n\n1. Make sure that Logging API is [enabled](https://cloud.google.com/apis/docs/getting-started#enabling_apis). \n\n2. [Create service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts) with [required permissions](https://cloud.google.com/iam/docs/audit-logging#audit_log_permissions) and [get service account key json file](https://cloud.google.com/iam/docs/creating-managing-service-account-keys).\n\n3. 
Prepare GCP project ID where ApigeeX is located.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Azure Blob Storage connection string and container name, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ApigeeXDataConnector-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Google Cloud Platform Project Id**, **Google Cloud Platform Credentials File Content**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key**\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-ApigeeXDataConnector-functionapp) file. 
Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions.\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tRESOURCE_NAMES\n\t\tCREDENTIALS_FILE_CONTENT\n\t\tWORKSPACE_ID\n\t\tSHARED_KEY\n\t\tlogAnalyticsUri (Optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. 
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. \n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""GCP service account"", ""description"": ""GCP service account with permissions to read logs is required for GCP Logging API. Also json file with service account key is required. 
See the documentation to learn more about [required permissions](https://cloud.google.com/iam/docs/audit-logging#audit_log_permissions), [creating service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts) and [creating service account key](https://cloud.google.com/iam/docs/creating-managing-service-account-keys).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Apigee/Data%20Connectors/ApigeeX_FunctionApp.json","true" +"GCPApigee","Google Apigee","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Apigee","azuresentinel","azure-sentinel-solution-googleapigeex","2021-10-28","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GoogleApigeeXLogsCCPDefinition","Microsoft","Google ApigeeX (via Codeless Connector Framework)","The Google ApigeeX data connector provides the capability to ingest Audit logs into Microsoft Sentinel using the Google Apigee API. Refer to [Google Apigee API](https://cloud.google.com/apigee/docs/reference/apis/apigee/rest/?apix=true) documentation for more information.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. 
Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Apigee/Data%20Connectors/ApigeeXReadme.md) for log setup and authentication setup tutorial.\n Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPApigeeLogSetup)\nAuthentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup)"", ""govScript"": ""#### 1. Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Apigee/Data%20Connectors/ApigeeXReadme.md) for log setup and authentication setup tutorial.\n Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPApigeeLogSetup)\nAuthentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov)""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": 
{""content"": ""#### 2. Enable ApigeeX logs \n In the Google Cloud Console, enable Apigee API, if not enabled previously, and save the changes.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. Connect new collectors \n To enable ApigeeX Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}], ""title"": ""Connect Google ApigeeX to Microsoft Sentinel\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Apigee/Data%20Connectors/GoogleApigeeXLog_CCP/GoogleApigeeXLog_ConnectorDefinition.json","true" +"GCPAuditLogs","Google Cloud Platform Audit Logs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Audit%20Logs","azuresentinel","azure-sentinel-solution-gcpauditlogs-api","2023-03-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GCPAuditLogsDefinition","Microsoft","GCP Pub/Sub Audit Logs","The Google Cloud Platform (GCP) audit logs, ingested from Microsoft Sentinel's connector, enables you to capture three types of audit logs: admin activity logs, data access logs, and access transparency logs. Google cloud audit logs record a trail that practitioners can use to monitor access and detect potential threats across Google Cloud Platform (GCP) resources.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. 
Set up your GCP environment \n You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider and service account with permissions to get and consume from subscription. \n Terraform provides API for the IAM that creates the resources. [Link to Terraform scripts](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation)."", ""govScript"": ""#### 1. Set up your GCP environment \n You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider and service account with permissions to get and consume from subscription. \n Terraform provides API for the IAM that creates the resources. [Link to Gov Terraform scripts](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov).""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Connect new collectors \n To enable GCP Audit Logs for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Audit%20Logs/Data%20Connectors/GCPAuditLogs_ccp/data_connector_definition.json","true" +"","Google Cloud Platform BigQuery","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20BigQuery","azuresentinel","azure-sentinel-solution-gcpbigquery","2023-03-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"GCPMonitoring","Google Cloud Platform Cloud Monitoring","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Monitoring","azuresentinel","azure-sentinel-solution-gcpmonitoring","2022-07-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GCPMonitorCCPDefinition","Microsoft","Google Cloud Platform Cloud Monitoring (via Codeless Connector Framework)","The Google Cloud Platform Cloud Monitoring data connector ingests Monitoring logs from Google Cloud into Microsoft Sentinel using the Google Cloud Monitoring API. Refer to [Cloud Monitoring API](https://cloud.google.com/monitoring/api/v3) documentation for more details.","[{""title"": ""Connect Google Cloud Platform Cloud Monitoring to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. 
Setup GCP Monitoring Integration\n To fetch logs from GCP Cloud Monitoring to Sentinel **Project ID** of Google cloud is required.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Choose the **Metric Type**\n To collect logs from Google Cloud Monitoring provide the required Metric type.\n\nFor more details, refer to [Google Cloud Metrics](https://cloud.google.com/monitoring/api/metrics_gcp).""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. OAuth Credentials\n To fetch the OAuth client ID and client secret, refer to this [documentation](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Monitoring/Data%20Connectors/Readme.md).""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 4. Connect to Sentinel\n Click on **Connect** to start pulling monitoring logs from Google Cloud into Microsoft Sentinel.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""GCP Project ID"", ""name"": ""projectid"", ""required"": true, ""description"": ""Enter your Google Cloud Project ID.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Metric Type"", ""type"": ""text"", ""name"": ""metrictype"", ""required"": true, ""description"": ""Provide the metric types you want to collect logs for, comma separated. For example: compute.googleapis.com/instance/disk/write_bytes_count,compute.googleapis.com/instance/uptime_total""}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Metric Type"", ""columnValue"": ""properties.addOnAttributes.metrictype""}, {""columnName"": ""Project ID"", ""columnValue"": ""properties.addOnAttributes.projectid""}], ""menuItems"": [""DeleteConnector""]}}]}]","{""resourceProvider"": [{""provider"": 
""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Monitoring/Data%20Connectors/GCPMonitoringLogs_CCP/GCPCloudMonitoringLogs_ConnectorDefinition.json","true" +"GCP_MONITORING_CL","Google Cloud Platform Cloud Monitoring","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Monitoring","azuresentinel","azure-sentinel-solution-gcpmonitoring","2022-07-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GCPMonitorDataConnector","Google","[DEPRECATED] Google Cloud Platform Cloud Monitoring","The Google Cloud Platform Cloud Monitoring data connector provides the capability to ingest [GCP Monitoring metrics](https://cloud.google.com/monitoring/api/metrics_gcp) into Microsoft Sentinel using the GCP Monitoring API. Refer to [GCP Monitoring API documentation](https://cloud.google.com/monitoring/api/v3) for more information.

NOTE: This data connector has been deprecated; consider moving to the CCF data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the GCP API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**GCP_MONITORING**](https://aka.ms/sentinel-GCPMonitorDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuring GCP and obtaining credentials**\n\n1. [Create service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts) with Monitoring Viewer role and [get service account key json file](https://cloud.google.com/iam/docs/creating-managing-service-account-keys).\n\n2. Prepare the list of GCP projects to get metrics from. [Learn more about GCP projects](https://cloud.google.com/resource-manager/docs/cloud-platform-resource-hierarchy).\n\n3. 
Prepare the list of [GCP metric types](https://cloud.google.com/monitoring/api/metrics_gcp)""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Azure Blob Storage connection string and container name, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-GCPMonitorDataConnector-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Google Cloud Platform Project Id List**, **Google Cloud Platform Metric Types List**, **Google Cloud Platform Credentials File Content**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key**\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-GCPMonitorDataConnector-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions.\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGCP_PROJECT_ID\n\t\tGCP_METRICS\n\t\tGCP_CREDENTIALS_FILE_CONTENT\n\t\tWORKSPACE_ID\n\t\tSHARED_KEY\n\t\tlogAnalyticsUri (Optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. \n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""GCP service account"", ""description"": ""GCP service account with permissions to read Cloud Monitoring metrics is required for GCP Monitoring API (required *Monitoring Viewer* role). Also json file with service account key is required. 
See the documentation to learn more about [creating service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts) and [creating service account key](https://cloud.google.com/iam/docs/creating-managing-service-account-keys).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Monitoring/Data%20Connectors/GCP_Monitor_API_FunctionApp.json","true" +"GCPCloudRun","Google Cloud Platform Cloud Run","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Run","azuresentinel","azure-sentinel-solution-gcpcloudrun","2021-07-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GCPCloudRunLogs_ConnectorDefinition","Microsoft","GCP Cloud Run (via Codeless Connector Framework)","The GCP Cloud Run data connector provides the capability to ingest Cloud Run request logs into Microsoft Sentinel using Pub/Sub. Refer the [Cloud Run Overview](https://cloud.google.com/run/docs/logging) for more details.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. 
Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Run/Data%20Connectors/README.md) for log setup and authentication setup tutorial.\n\n Find the Log set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPCloudRunLogsSetup)\n & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup)"", ""govScript"": ""#### 1. Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Run/Data%20Connectors/README.md) for log setup and authentication setup tutorial.\n\n Find the Log set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPCloudRunLogsSetup)\n & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov)""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", 
""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Enable Cloud Run logs \n In the Google Cloud Console, enable cloud logging if not enabled previously, and save the changes.Deploy or update your Cloud Run services with logging enabled.\n\n Reference Link: [Link to documentation](https://cloud.google.com/run/docs/setup)""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. Connect new collectors \n To enable GCP Cloud Run Request Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}], ""title"": ""Connect GCP Cloud Run to Microsoft Sentinel\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Cloud%20Run/Data%20Connectors/GCPCloudRunLog_CCF/GCPCloudRunLogs_ConnectorDefinition.json","true" +"GCPComputeEngine","Google Cloud Platform Compute Engine","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Compute%20Engine","azuresentinel","azure-sentinel-solution-gcpcomputeengine","2022-07-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GCPComputeEngineLogsCCPDefinition","Microsoft","Google Cloud Platform Compute Engine (via Codeless Connector Framework)","The Google Cloud Platform Compute Engine data connector provides the capability to ingest Compute Engine Audit logs into Microsoft Sentinel using the Google Cloud Compute Engine API. 
Refer to [Cloud Compute Engine API](https://cloud.google.com/compute/docs/reference/rest/v1) documentation for more information.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Compute%20Engine/Data%20Connectors/GCPComputeEngineReadme.md) for log setup and authentication setup tutorial.\n Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPComputeEngineLogsSetup/GCPComputeEngineLogSetup.tf)\nAuthentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup)"", ""govScript"": ""#### 1. 
Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Compute%20Engine/Data%20Connectors/GCPComputeEngineReadme.md) for log setup and authentication setup tutorial.\n Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPComputeEngineLogsSetup/GCPComputeEngineLogSetup.tf)\nAuthentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov)""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Enable Compute Engine logs \n In the Google Cloud Console, enable Compute Engine API, if not enabled previously, and save the changes.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \n To enable Compute Engine Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}], ""title"": ""Connect GCP Compute Engine to Microsoft Sentinel\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Compute%20Engine/Data%20Connectors/GCPComputeEngineLog_CCP/GCPComputeEngineLog_ConnectorDefinition.json","true" +"GCPFirewallLogs","Google Cloud Platform Firewall Logs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Firewall%20Logs","","","","","","","","","","","GCPFirewallLogsCCPDefinition","Microsoft","GCP Pub/Sub Firewall Logs","The Google Cloud Platform (GCP) firewall logs, enable you to capture network inbound and outbound activity to monitor access and detect potential threats across Google Cloud Platform (GCP) resources.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. Set up your GCP environment \n You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider and service account with permissions to get and consume from subscription. \n Terraform provides API for the IAM that creates the resources. 
[Link to Terraform scripts](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation)\n Connector tutorial: [Link to tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup) ."", ""govScript"": ""#### 1. Set up your GCP environment \n You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider and service account with permissions to get and consume from subscription. \n Terraform provides API for the IAM that creates the resources. [Link to Gov Terraform scripts](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov)\n Connector tutorial: [Link to tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup).""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Enable Firewall logs \nIn your GCP account, navigate to the Firewall section. Here, you can either create a new rule or edit an existing one that you want to monitor. Once you open the rule, switch the toggle button under the **Logs** section to **On**, and save the changes.\n\nFor more information: [Link to documentation](https://cloud.google.com/firewall/docs/using-firewall-rules-logging?_gl=1*1no0nhk*_ga*NDMxNDIxODI3LjE3MjUyNjUzMzc.*_ga_WH2QY8WWF5*MTcyNTUyNzc4MS4xMS4xLjE3MjU1MjgxNTIuNDYuMC4w)""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \n To enable GCP Firewall Logs for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Firewall%20Logs/Data%20Connectors/GCPFirewallLogs_ccp/GCP_ConnectorDefinition.json","true" +"GCPLoadBalancerLogs_CL","Google Cloud Platform Load Balancer Logs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Load%20Balancer%20Logs","azuresentinel","azure-sentinel-solution-gcploadbalancerlogs-api","2025-02-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GCPFLoadBalancerLogsCCPDefinition","Microsoft","GCP Pub/Sub Load Balancer Logs (via Codeless Connector Platform).","Google Cloud Platform (GCP) Load Balancer logs provide detailed insights into network traffic, capturing both inbound and outbound activities. These logs are used for monitoring access patterns and identifying potential security threats across GCP resources. Additionally, these logs also include GCP Web Application Firewall (WAF) logs, enhancing the ability to detect and mitigate risks effectively.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. 
Set up your GCP environment \n You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider and service account with permissions to get and consume from subscription. \n Terraform provides API for the IAM that creates the resources. [Link to Terraform scripts](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation)."", ""govScript"": ""#### 1. Set up your GCP environment \n You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider and service account with permissions to get and consume from subscription. \n Terraform provides API for the IAM that creates the resources. [Link to Gov Terraform scripts](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov).""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""PoolId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Enable Load Balancer logs \nIn your GCP account, navigate to the Load Balancer section. In here you can navigate to [**Backend Service**] -> [**Edit**], once you are in the [**Backend Service**] on the [**Logging**] section **enable** the checkbox of [**Enable Logs**]. Once you open the rule, switch the toggle button under the **Logs** section to **On**, and save the changes.\n\nFor more information: [Link to documentation](https://cloud.google.com/load-balancing/docs/https/https-logging-monitoring)""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \n To enable GCP Load Balancer Logs for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Load%20Balancer%20Logs/Data%20Connectors/GCPFLoadBalancerLogs_GCP_CCP/GCPFLoadBalancerLogs_Definition.json","true" +"GoogleCloudSCC","Google Cloud Platform Security Command Center","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Security%20Command%20Center","azuresentinel","azure-sentinel-solution-gcpscclogs-api","2023-09-11","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GoogleSCCDefinition","Microsoft","Google Security Command Center","The Google Cloud Platform (GCP) Security Command Center is a comprehensive security and risk management platform for Google Cloud, ingested from Sentinel's connector. It offers features such as asset inventory and discovery, vulnerability and threat detection, and risk mitigation and remediation to help you gain insight into your organization's security and data attack surface. This integration enables you to perform tasks related to findings and assets more effectively.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. 
Set up your GCP environment \n You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider and service account with permissions to get and consume from subscription. \n Terraform provides API for the IAM that creates the resources. [Link to Terraform scripts](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation).""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""PoolId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Connect new collectors \n To enable GCP SCC for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}]}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": false, ""write"": false, ""delete"": false, ""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20Security%20Command%20Center/Data%20Connectors/GCPSecurityCommandCenter.json","true" +"GCPVPCFlow","Google Cloud Platform VPC Flow Logs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20VPC%20Flow%20Logs","azuresentinel","azure-sentinel-solution-gcpvpcflowlogs-api","2025-02-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GCPVPCFlowLogsCCPDefinition","Microsoft","GCP Pub/Sub VPC Flow Logs (via Codeless Connector Framework)","The Google Cloud Platform (GCP) VPC Flow Logs enable you to capture network traffic activity at the VPC level, allowing you to monitor access patterns, analyze network performance, and detect potential threats across GCP resources.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. Set up your GCP environment \n You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription. \n To configure this data connector, execute the following Terraform scripts:\n 1. Setup Required Resources: [Configuration Guide](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPVPCFlowLogsSetup/readme.md)\n 2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). 
Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool."", ""govScript"": ""#### 1. Set up your GCP environment \n You must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription. \n To configure this data connector, execute the following Terraform scripts:\n 1. Setup Required Resources: [Configuration Guide](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPVPCFlowLogsSetup/readme.md)\n 2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Enable VPC Flow Logs \nIn your GCP account, navigate to the VPC network section. Select the subnet you want to monitor and enable Flow Logs under the Logging section.\n\nFor more information: [Google Cloud Documentation](https://cloud.google.com/vpc/docs/using-flow-logs)""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \n To enable GCP VPC Flow Logs for Microsoft Sentinel, click the Add new collector button, fill in the required information in the context pane, and click Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Cloud%20Platform%20VPC%20Flow%20Logs/Data%20Connectors/GCPVPCFlowLogs_GCP_CCP/GCPVPCFlowLogs_ConnectorDefinition.json","true" +"GKEAPIServer","Google Kubernetes Engine","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine","azuresentinel","azure-sentinel-solution-gkelogs-api","2025-04-04","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GKECCPDefinition","Microsoft","Google Kubernetes Engine (via Codeless Connector Framework)","The Google Kubernetes Engine (GKE) Logs enable you to capture cluster activity, workload behavior, and security events, allowing you to monitor Kubernetes workloads, analyze performance, and detect potential threats across GKE clusters.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. Set up your GCP environment \nYou must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription.\n\nTo configure this data connector, execute the following Terraform scripts:\n\n1. 
Setup Required Resources: [Configuration Guide](https://github.com/Alekhya0824/GithubValidationREPO/blob/main/gke/Readme.md)\n2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool."", ""govScript"": ""#### 1. Set up your GCP environment \nYou must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription.\n\nTo configure this data connector, execute the following Terraform scripts:\n\n1. Setup Required Resources: [Configuration Guide](https://github.com/Alekhya0824/GithubValidationREPO/blob/main/gke/Readme.md)\n2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Enable Kubernetes Engine Logging \nIn your GCP account, navigate to the Kubernetes Engine section. Enable Cloud Logging for your clusters. 
Within Cloud Logging, ensure that the specific logs you want to ingest\u2014such as API server, scheduler, controller manager, HPA decision, and application logs\u2014are enabled for effective monitoring and security analysis.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. Connect new collectors \nTo enable GKE Logs for Microsoft Sentinel, click the **Add new collector** button, fill in the required information in the context pane, and click **Connect**.""}}, {""type"": ""GCPGrid"", ""parameters"": {""collectors"": [{""name"": ""Audit Collector"", ""tableName"": ""GKEAudit""}, {""name"": ""API Server Collector"", ""tableName"": ""GKEAPIServer""}, {""name"": ""Scheduler Collector"", ""tableName"": ""GKEScheduler""}, {""name"": ""Controller Manager Collector"", ""tableName"": ""GKEControllerManager""}, {""name"": ""HPA Decision Collector"", ""tableName"": ""GKEHPADecision""}, {""name"": ""Application Collector"", ""tableName"": ""GKEApplication""}]}}, {""type"": ""GCPContextPane"", ""parameters"": {}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine/Data%20Connectors/GoogleKubernetesEngineLogs_ccp/GoogleKubernetesEngineLogs_ConnectorDefinition.json","true" +"GKEApplication","Google Kubernetes Engine","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine","azuresentinel","azure-sentinel-solution-gkelogs-api","2025-04-04","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GKECCPDefinition","Microsoft","Google Kubernetes Engine (via Codeless Connector Framework)","The Google Kubernetes Engine (GKE) Logs 
enable you to capture cluster activity, workload behavior, and security events, allowing you to monitor Kubernetes workloads, analyze performance, and detect potential threats across GKE clusters.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. Set up your GCP environment \nYou must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription.\n\nTo configure this data connector, execute the following Terraform scripts:\n\n1. Setup Required Resources: [Configuration Guide](https://github.com/Alekhya0824/GithubValidationREPO/blob/main/gke/Readme.md)\n2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool."", ""govScript"": ""#### 1. Set up your GCP environment \nYou must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription.\n\nTo configure this data connector, execute the following Terraform scripts:\n\n1. Setup Required Resources: [Configuration Guide](https://github.com/Alekhya0824/GithubValidationREPO/blob/main/gke/Readme.md)\n2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). 
Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Enable Kubernetes Engine Logging \nIn your GCP account, navigate to the Kubernetes Engine section. Enable Cloud Logging for your clusters. Within Cloud Logging, ensure that the specific logs you want to ingest\u2014such as API server, scheduler, controller manager, HPA decision, and application logs\u2014are enabled for effective monitoring and security analysis.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. Connect new collectors \nTo enable GKE Logs for Microsoft Sentinel, click the **Add new collector** button, fill in the required information in the context pane, and click **Connect**.""}}, {""type"": ""GCPGrid"", ""parameters"": {""collectors"": [{""name"": ""Audit Collector"", ""tableName"": ""GKEAudit""}, {""name"": ""API Server Collector"", ""tableName"": ""GKEAPIServer""}, {""name"": ""Scheduler Collector"", ""tableName"": ""GKEScheduler""}, {""name"": ""Controller Manager Collector"", ""tableName"": ""GKEControllerManager""}, {""name"": ""HPA Decision Collector"", ""tableName"": ""GKEHPADecision""}, {""name"": ""Application Collector"", ""tableName"": ""GKEApplication""}]}}, {""type"": ""GCPContextPane"", ""parameters"": {}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": 
false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine/Data%20Connectors/GoogleKubernetesEngineLogs_ccp/GoogleKubernetesEngineLogs_ConnectorDefinition.json","true" +"GKEAudit","Google Kubernetes Engine","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine","azuresentinel","azure-sentinel-solution-gkelogs-api","2025-04-04","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GKECCPDefinition","Microsoft","Google Kubernetes Engine (via Codeless Connector Framework)","The Google Kubernetes Engine (GKE) Logs enable you to capture cluster activity, workload behavior, and security events, allowing you to monitor Kubernetes workloads, analyze performance, and detect potential threats across GKE clusters.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. Set up your GCP environment \nYou must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription.\n\nTo configure this data connector, execute the following Terraform scripts:\n\n1. Setup Required Resources: [Configuration Guide](https://github.com/Alekhya0824/GithubValidationREPO/blob/main/gke/Readme.md)\n2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool."", ""govScript"": ""#### 1. 
Set up your GCP environment \nYou must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription.\n\nTo configure this data connector, execute the following Terraform scripts:\n\n1. Setup Required Resources: [Configuration Guide](https://github.com/Alekhya0824/GithubValidationREPO/blob/main/gke/Readme.md)\n2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Enable Kubernetes Engine Logging \nIn your GCP account, navigate to the Kubernetes Engine section. Enable Cloud Logging for your clusters. Within Cloud Logging, ensure that the specific logs you want to ingest\u2014such as API server, scheduler, controller manager, HPA decision, and application logs\u2014are enabled for effective monitoring and security analysis.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \nTo enable GKE Logs for Microsoft Sentinel, click the **Add new collector** button, fill in the required information in the context pane, and click **Connect**.""}}, {""type"": ""GCPGrid"", ""parameters"": {""collectors"": [{""name"": ""Audit Collector"", ""tableName"": ""GKEAudit""}, {""name"": ""API Server Collector"", ""tableName"": ""GKEAPIServer""}, {""name"": ""Scheduler Collector"", ""tableName"": ""GKEScheduler""}, {""name"": ""Controller Manager Collector"", ""tableName"": ""GKEControllerManager""}, {""name"": ""HPA Decision Collector"", ""tableName"": ""GKEHPADecision""}, {""name"": ""Application Collector"", ""tableName"": ""GKEApplication""}]}}, {""type"": ""GCPContextPane"", ""parameters"": {}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine/Data%20Connectors/GoogleKubernetesEngineLogs_ccp/GoogleKubernetesEngineLogs_ConnectorDefinition.json","true" +"GKEControllerManager","Google Kubernetes Engine","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine","azuresentinel","azure-sentinel-solution-gkelogs-api","2025-04-04","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GKECCPDefinition","Microsoft","Google Kubernetes Engine (via Codeless Connector Framework)","The Google Kubernetes Engine (GKE) Logs enable you to capture cluster activity, workload behavior, and security events, allowing you to monitor Kubernetes workloads, analyze performance, and detect potential threats across GKE clusters.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": 
""#### 1. Set up your GCP environment \nYou must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription.\n\nTo configure this data connector, execute the following Terraform scripts:\n\n1. Setup Required Resources: [Configuration Guide](https://github.com/Alekhya0824/GithubValidationREPO/blob/main/gke/Readme.md)\n2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool."", ""govScript"": ""#### 1. Set up your GCP environment \nYou must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription.\n\nTo configure this data connector, execute the following Terraform scripts:\n\n1. Setup Required Resources: [Configuration Guide](https://github.com/Alekhya0824/GithubValidationREPO/blob/main/gke/Readme.md)\n2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Enable Kubernetes Engine Logging \nIn your GCP account, navigate to the Kubernetes Engine section. Enable Cloud Logging for your clusters. Within Cloud Logging, ensure that the specific logs you want to ingest\u2014such as API server, scheduler, controller manager, HPA decision, and application logs\u2014are enabled for effective monitoring and security analysis.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. Connect new collectors \nTo enable GKE Logs for Microsoft Sentinel, click the **Add new collector** button, fill in the required information in the context pane, and click **Connect**.""}}, {""type"": ""GCPGrid"", ""parameters"": {""collectors"": [{""name"": ""Audit Collector"", ""tableName"": ""GKEAudit""}, {""name"": ""API Server Collector"", ""tableName"": ""GKEAPIServer""}, {""name"": ""Scheduler Collector"", ""tableName"": ""GKEScheduler""}, {""name"": ""Controller Manager Collector"", ""tableName"": ""GKEControllerManager""}, {""name"": ""HPA Decision Collector"", ""tableName"": ""GKEHPADecision""}, {""name"": ""Application Collector"", ""tableName"": ""GKEApplication""}]}}, {""type"": ""GCPContextPane"", ""parameters"": {}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine/Data%20Connectors/GoogleKubernetesEngineLogs_ccp/GoogleKubernetesEngineLogs_ConnectorDefinition.json","true" +"GKEHPADecision","Google Kubernetes Engine","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine","azuresentinel","azure-sentinel-solution-gkelogs-api","2025-04-04","","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com","","domains","GKECCPDefinition","Microsoft","Google Kubernetes Engine (via Codeless Connector Framework)","The Google Kubernetes Engine (GKE) Logs enable you to capture cluster activity, workload behavior, and security events, allowing you to monitor Kubernetes workloads, analyze performance, and detect potential threats across GKE clusters.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. Set up your GCP environment \nYou must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription.\n\nTo configure this data connector, execute the following Terraform scripts:\n\n1. Setup Required Resources: [Configuration Guide](https://github.com/Alekhya0824/GithubValidationREPO/blob/main/gke/Readme.md)\n2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool."", ""govScript"": ""#### 1. Set up your GCP environment \nYou must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription.\n\nTo configure this data connector, execute the following Terraform scripts:\n\n1. Setup Required Resources: [Configuration Guide](https://github.com/Alekhya0824/GithubValidationREPO/blob/main/gke/Readme.md)\n2. 
Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Enable Kubernetes Engine Logging \nIn your GCP account, navigate to the Kubernetes Engine section. Enable Cloud Logging for your clusters. Within Cloud Logging, ensure that the specific logs you want to ingest\u2014such as API server, scheduler, controller manager, HPA decision, and application logs\u2014are enabled for effective monitoring and security analysis.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \nTo enable GKE Logs for Microsoft Sentinel, click the **Add new collector** button, fill in the required information in the context pane, and click **Connect**.""}}, {""type"": ""GCPGrid"", ""parameters"": {""collectors"": [{""name"": ""Audit Collector"", ""tableName"": ""GKEAudit""}, {""name"": ""API Server Collector"", ""tableName"": ""GKEAPIServer""}, {""name"": ""Scheduler Collector"", ""tableName"": ""GKEScheduler""}, {""name"": ""Controller Manager Collector"", ""tableName"": ""GKEControllerManager""}, {""name"": ""HPA Decision Collector"", ""tableName"": ""GKEHPADecision""}, {""name"": ""Application Collector"", ""tableName"": ""GKEApplication""}]}}, {""type"": ""GCPContextPane"", ""parameters"": {}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine/Data%20Connectors/GoogleKubernetesEngineLogs_ccp/GoogleKubernetesEngineLogs_ConnectorDefinition.json","true" +"GKEScheduler","Google Kubernetes Engine","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine","azuresentinel","azure-sentinel-solution-gkelogs-api","2025-04-04","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GKECCPDefinition","Microsoft","Google Kubernetes Engine (via Codeless Connector Framework)","The Google Kubernetes Engine (GKE) Logs enable you to capture cluster activity, workload behavior, and security events, allowing you to monitor Kubernetes workloads, analyze performance, and detect potential threats across GKE clusters.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. 
Set up your GCP environment \nYou must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription.\n\nTo configure this data connector, execute the following Terraform scripts:\n\n1. Setup Required Resources: [Configuration Guide](https://github.com/Alekhya0824/GithubValidationREPO/blob/main/gke/Readme.md)\n2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool."", ""govScript"": ""#### 1. Set up your GCP environment \nYou must have the following GCP resources defined and configured: topic, subscription for the topic, workload identity pool, workload identity provider, and service account with permissions to get and consume from the subscription.\n\nTo configure this data connector, execute the following Terraform scripts:\n\n1. Setup Required Resources: [Configuration Guide](https://github.com/Alekhya0824/GithubValidationREPO/blob/main/gke/Readme.md)\n2. Setup Authentication: [Authentication tutorial](https://learn.microsoft.com/en-us/azure/sentinel/connect-google-cloud-platform?tabs=terraform%2Cauditlogs#gcp-authentication-setup). Note: If Authentication is already setup using another GCP data connector, kindly skip this step and use the existing service account and workload identity pool.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Enable Kubernetes Engine Logging \nIn your GCP account, navigate to the Kubernetes Engine section. Enable Cloud Logging for your clusters. Within Cloud Logging, ensure that the specific logs you want to ingest\u2014such as API server, scheduler, controller manager, HPA decision, and application logs\u2014are enabled for effective monitoring and security analysis.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. Connect new collectors \nTo enable GKE Logs for Microsoft Sentinel, click the **Add new collector** button, fill in the required information in the context pane, and click **Connect**.""}}, {""type"": ""GCPGrid"", ""parameters"": {""collectors"": [{""name"": ""Audit Collector"", ""tableName"": ""GKEAudit""}, {""name"": ""API Server Collector"", ""tableName"": ""GKEAPIServer""}, {""name"": ""Scheduler Collector"", ""tableName"": ""GKEScheduler""}, {""name"": ""Controller Manager Collector"", ""tableName"": ""GKEControllerManager""}, {""name"": ""HPA Decision Collector"", ""tableName"": ""GKEHPADecision""}, {""name"": ""Application Collector"", ""tableName"": ""GKEApplication""}]}}, {""type"": ""GCPContextPane"", ""parameters"": {}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Kubernetes%20Engine/Data%20Connectors/GoogleKubernetesEngineLogs_ccp/GoogleKubernetesEngineLogs_ConnectorDefinition.json","true" +"","Google Threat 
Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Google%20Threat%20Intelligence","virustotalsl1681486227461","azure-sentinel-solution-google","2024-10-26","2024-10-26","","Google","Partner","https://www.virustotal.com/gui/contact-us","","domains","","","","","","","","false" +"GCPCDN","GoogleCloudPlatformCDN","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformCDN","azuresentinel","azure-sentinel-solution-gcp-cdn","2025-03-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GCPCDNLogsCCPDefinition","Microsoft","Google Cloud Platform CDN (via Codeless Connector Framework)","The Google Cloud Platform CDN data connector provides the capability to ingest Cloud CDN Audit logs and Cloud CDN Traffic logs into Microsoft Sentinel using the Compute Engine API. Refer the [Product overview](https://cloud.google.com/cdn/docs/overview) document for more details.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformCDN/Data%20Connectors/README.md) for log setup and authentication setup tutorial.\n\n Find the Log set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPCDNLogsSetup)\n & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup)"", ""govScript"": ""#### 1. 
Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformCDN/Data%20Connectors/README.md) for log setup and authentication setup tutorial.\n\n Find the Log set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPCDNLogsSetup)\n & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov)""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Enable CDN logs \n In the Google Cloud Console, enable cloud logging if not enabled previously, and save the changes. Navigate to Cloud CDN section and click on Add origin to create backends as per link provided below. \n\n Reference Link: [Link to documentation](https://cloud.google.com/cdn/docs/using-cdn)""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \n To enable GCP Cloud CDN Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}], ""title"": ""Connect GCP CDN to Microsoft Sentinel\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformCDN/Data%20Connectors/GCPCDNLogs_ccp/GCPCDNLogs_ConnectorDefinition.json","true" +"GCP_DNS_CL","GoogleCloudPlatformDNS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformDNS","azuresentinel","azure-sentinel-solution-gcpdns","2022-07-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GCPDNSDataConnector","Google","[DEPRECATED] Google Cloud Platform DNS","The Google Cloud Platform DNS data connector provides the capability to ingest [Cloud DNS query logs](https://cloud.google.com/dns/docs/monitoring#using_logging) and [Cloud DNS audit logs](https://cloud.google.com/dns/docs/audit-logging) into Microsoft Sentinel using the GCP Logging API. Refer to [GCP Logging API documentation](https://cloud.google.com/logging/docs/api) for more information.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the GCP API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**GCPCloudDNS**](https://aka.ms/sentinel-GCPDNSDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuring GCP and obtaining credentials**\n\n1. Make sure that Logging API is [enabled](https://cloud.google.com/apis/docs/getting-started#enabling_apis). \n\n2. [Create service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts) with Logs Viewer role (or at least with \""logging.logEntries.list\"" permission) and [get service account key json file](https://cloud.google.com/iam/docs/creating-managing-service-account-keys).\n\n3. Prepare the list of GCP resources (organizations, folders, projects) to get logs from. 
[Learn more about GCP resources](https://cloud.google.com/resource-manager/docs/cloud-platform-resource-hierarchy).""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Azure Blob Storage connection string and container name, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-GCPDNSDataConnector-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Google Cloud Platform Resource Names**, **Google Cloud Platform Credentials File Content**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key**\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-GCPDNSDataConnector-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions.\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tRESOURCE_NAMES\n\t\tCREDENTIALS_FILE_CONTENT\n\t\tWORKSPACE_ID\n\t\tSHARED_KEY\n\t\tlogAnalyticsUri (Optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. \n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""GCP service account"", ""description"": ""GCP service account with permissions to read logs (with \""logging.logEntries.list\"" permission) is required for GCP Logging API. Also json file with service account key is required. 
See the documentation to learn more about [permissions](https://cloud.google.com/logging/docs/access-control#permissions_and_roles), [creating service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts) and [creating service account key](https://cloud.google.com/iam/docs/creating-managing-service-account-keys).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformDNS/Data%20Connectors/GCP_DNS_API_FunctionApp.json","true" +"GCPDNS","GoogleCloudPlatformDNS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformDNS","azuresentinel","azure-sentinel-solution-gcpdns","2022-07-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GCPDNSLogsCCPDefinition","Microsoft","Google Cloud Platform DNS (via Codeless Connector Framework)","The Google Cloud Platform DNS data connector provides the capability to ingest Cloud DNS Query logs and Cloud DNS Audit logs into Microsoft Sentinel using the Google Cloud DNS API. Refer to [Cloud DNS API](https://cloud.google.com/dns/docs/reference/rest/v1) documentation for more information.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** If both Azure Function and CCP connector are running simultaneously, duplicate data is populated in the tables.""}}, {""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. 
Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformDNS/Data%20Connectors/README.md) for log setup and authentication setup tutorial.\n Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPDNS_CCPLogsSetup)\nAuthentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup)"", ""govScript"": ""#### 1. Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformDNS/Data%20Connectors/README.md) for log setup and authentication setup tutorial.\n Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPDNS_CCPLogsSetupGov)\nAuthentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov)""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", 
""parameters"": {""content"": ""#### 2. Enable DNS logs \n In the Google Cloud Console, navigate to Cloud DNS Section. Enable cloud logging if not enabled previously, and save the changes. Here, you can manage the existing zones, or create a new zone and create policies for the zone which you want to monitor.\n\nFor more information: [Link to documentation](https://cloud.google.com/dns/docs/zones/zones-overview)""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. Connect new collectors \n To enable GCP DNS Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}], ""title"": ""Connect GCP DNS to Microsoft Sentinel\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformDNS/Data%20Connectors/GCPDNSLog_CCP/GCPDNSLog_ConnectorDefinition.json","true" +"GCPIAM","GoogleCloudPlatformIAM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformIAM","azuresentinel","azure-sentinel-solution-gcpiam","2021-07-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GCPIAMCCPDefinition","Microsoft","Google Cloud Platform IAM (via Codeless Connector Framework)","The Google Cloud Platform IAM data connector provides the capability to ingest the Audit logs relating to Identity and Access Management (IAM) activities within Google Cloud into Microsoft Sentinel using the Google IAM API. 
Refer to [GCP IAM API](https://cloud.google.com/iam/docs/reference/rest) documentation for more information.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** If both Azure Function and CCF connector are running parallelly, duplicate data is populated in the tables.""}}, {""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformIAM/Data%20Connectors/README.md) for log setup and authentication setup tutorial.\n Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPIAMCCPLogsSetup)\nAuthentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup)"", ""govScript"": ""#### 1. 
Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformIAM/Data%20Connectors/README.md) for log setup and authentication setup tutorial.\n Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPIAMCCPLogsSetup)\nAuthentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov)""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. To enable IAM logs \n In your GCP account, navigate to the IAM section. From there, you can either create a new user or modify an existing user's role that you want to monitor. Be sure to save your changes.\n\nFor more information: [Link to documentation](https://cloud.google.com/assured-workloads/docs/iam-roles?hl=en)""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \n To enable GCPIAM Logs for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}], ""title"": ""Connect GCP IAM to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformIAM/Data%20Connectors/GCPIAMLog_CCP/GCPIAMLog_ConnectorDefinition.json","true" +"GCP_IAM_CL","GoogleCloudPlatformIAM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformIAM","azuresentinel","azure-sentinel-solution-gcpiam","2021-07-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GCPIAMDataConnector","Google","[DEPRECATED] Google Cloud Platform IAM","The Google Cloud Platform Identity and Access Management (IAM) data connector provides the capability to ingest [GCP IAM logs](https://cloud.google.com/iam/docs/audit-logging) into Microsoft Sentinel using the GCP Logging API. Refer to [GCP Logging API documentation](https://cloud.google.com/logging/docs/api) for more information.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the GCP API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**GCP_IAM**](https://aka.ms/sentinel-GCPIAMDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuring GCP and obtaining credentials**\n\n1. Make sure that Logging API is [enabled](https://cloud.google.com/apis/docs/getting-started#enabling_apis). \n\n2. (Optional) [Enable Data Access Audit logs](https://cloud.google.com/logging/docs/audit/configure-data-access#config-console-enable).\n\n3. [Create service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts) with [required permissions](https://cloud.google.com/iam/docs/audit-logging#audit_log_permissions) and [get service account key json file](https://cloud.google.com/iam/docs/creating-managing-service-account-keys).\n\n4. Prepare the list of GCP resources (organizations, folders, projects) to get logs from. 
[Learn more about GCP resources](https://cloud.google.com/resource-manager/docs/cloud-platform-resource-hierarchy).""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Azure Blob Storage connection string and container name, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-GCPIAMDataConnector-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Google Cloud Platform Resource Names**, **Google Cloud Platform Credentials File Content**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key**\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-GCPIAMDataConnector-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions.\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tRESOURCE_NAMES\n\t\tCREDENTIALS_FILE_CONTENT\n\t\tWORKSPACE_ID\n\t\tSHARED_KEY\n\t\tlogAnalyticsUri (Optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. \n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""GCP service account"", ""description"": ""GCP service account with permissions to read logs is required for GCP Logging API. Also json file with service account key is required. 
See the documentation to learn more about [required permissions](https://cloud.google.com/iam/docs/audit-logging#audit_log_permissions), [creating service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts) and [creating service account key](https://cloud.google.com/iam/docs/creating-managing-service-account-keys).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformIAM/Data%20Connectors/GCP_IAM_API_FunctionApp.json","true" +"GCPIDS","GoogleCloudPlatformIDS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformIDS","azuresentinel","azure-sentinel-solution-gcpids","2022-07-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GCPCLOUDIDSLogsCCPDefinition","Microsoft","Google Cloud Platform Cloud IDS (via Codeless Connector Framework)","The Google Cloud Platform IDS data connector provides the capability to ingest Cloud IDS Traffic logs, Threat logs and Audit logs into Microsoft Sentinel using the Google Cloud IDS API. Refer to [Cloud IDS API](https://cloud.google.com/intrusion-detection-system/docs/audit-logging#google.cloud.ids.v1.IDS) documentation for more information.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. 
Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformIDS/Data%20Connectors/README.md) for log setup and authentication setup tutorial.\n\n Find the Log set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPCloudIDSLogSetup)\n & the Authentication set up script: [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup)"", ""govScript"": ""#### 1. Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformIDS/Data%20Connectors/README.md) for log setup and authentication setup tutorial.\n\n Find the Log set up script: [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPCloudIDSLogSetup)\n & the Authentication set up script: [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov)""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": 
""Markdown"", ""parameters"": {""content"": ""#### 2. Enable IDS logs \n In the Google Cloud Console, enable Cloud IDS API, if not enabled previously. Create an IDS Endpoint and save the changes.\n\nFor more information on how to create and configure an IDS endpoint: [Link to documentation](https://cloud.google.com/intrusion-detection-system/docs/configuring-ids)""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. Connect new collectors \n To enable GCP IDS Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}], ""title"": ""Connect GCP Cloud IDS to Microsoft Sentinel\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformIDS/Data%20Connectors/GCPCloudIDSLog_CCP/GCPCloudIDSLog_ConnectorDefinition.json","true" +"GCPNAT","GoogleCloudPlatformNAT","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformNAT","azuresentinel","azure-sentinel-solution-gcp-nat","2025-05-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GCPNATLogsCCPDefinition","Microsoft","Google Cloud Platform NAT (via Codeless Connector Framework)","The Google Cloud Platform NAT data connector provides the capability to ingest Cloud NAT Audit logs and Cloud NAT Traffic logs into Microsoft Sentinel using the Compute Engine API. 
Refer the [Product overview](https://cloud.google.com/nat/docs/overview) document for more details.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformNAT/Data%20Connectors/README.md) for log setup and authentication setup tutorial.\n\n Find the Log set up script [**here**](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPCloudNATLogsSetup/GCPCloudNATLogsSetup.tf)\n & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup)"", ""govScript"": ""#### 1. 
Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformNAT/Data%20Connectors/README.md) for log setup and authentication setup tutorial.\n\n Find the Log set up script [**here**](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPCloudNATLogsSetup/GCPCloudNATLogsSetup.tf)\n & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov)""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Enable NAT logs \n In the Google Cloud Console, enable cloud logging if not enabled previously, and save the changes. Navigate to Cloud NAT section and click on Add origin to create backends as per link provided below. \n\n Reference Link: [Link to documentation](https://cloud.google.com/nat/docs/monitoring)""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \n To enable GCP Cloud NAT Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}], ""title"": ""Connect GCP NAT to Microsoft Sentinel\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformNAT/Data%20Connectors/GCPNATLogs_ccp/GCPNATLogs_ConnectorDefinition.json","true" +"GCPNATAudit","GoogleCloudPlatformNAT","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformNAT","azuresentinel","azure-sentinel-solution-gcp-nat","2025-05-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GCPNATLogsCCPDefinition","Microsoft","Google Cloud Platform NAT (via Codeless Connector Framework)","The Google Cloud Platform NAT data connector provides the capability to ingest Cloud NAT Audit logs and Cloud NAT Traffic logs into Microsoft Sentinel using the Compute Engine API. Refer the [Product overview](https://cloud.google.com/nat/docs/overview) document for more details.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. 
Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformNAT/Data%20Connectors/README.md) for log setup and authentication setup tutorial.\n\n Find the Log set up script [**here**](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPCloudNATLogsSetup/GCPCloudNATLogsSetup.tf)\n & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup)"", ""govScript"": ""#### 1. Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformNAT/Data%20Connectors/README.md) for log setup and authentication setup tutorial.\n\n Find the Log set up script [**here**](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPCloudNATLogsSetup/GCPCloudNATLogsSetup.tf)\n & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov)""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": 
""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Enable NAT logs \n In the Google Cloud Console, enable cloud logging if not enabled previously, and save the changes. Navigate to Cloud NAT section and click on Add origin to create backends as per link provided below. \n\n Reference Link: [Link to documentation](https://cloud.google.com/nat/docs/monitoring)""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. Connect new collectors \n To enable GCP Cloud NAT Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}], ""title"": ""Connect GCP NAT to Microsoft Sentinel\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformNAT/Data%20Connectors/GCPNATLogs_ccp/GCPNATLogs_ConnectorDefinition.json","true" +"GCPResourceManager","GoogleCloudPlatformResourceManager","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformResourceManager","azuresentinel","azure-sentinel-solution-gcp-rm","2025-03-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GCPResourceManagerLogsCCFDefinition","Microsoft","Google Cloud Platform Resource Manager (via Codeless Connector Framework)","The Google Cloud Platform Resource Manager data connector provides the capability to ingest Resource Manager [Admin Activity and Data Access Audit logs](https://cloud.google.com/resource-manager/docs/audit-logging) into Microsoft Sentinel using the 
Cloud Resource Manager API. Refer the [Product overview](https://cloud.google.com/resource-manager/docs/cloud-platform-resource-hierarchy) document for more details.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleCloudPlatformResourceManager/Data%20Connectors/README.md) for log setup and authentication setup tutorial.\n\n Find the Log set up script [**here**](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPResourceManagerLogsSetup/GCPResourceManagerLogSetup.tf)\n & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup)"", ""govScript"": ""#### 1. 
Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleCloudPlatformResourceManager/Data%20Connectors/README.md) for log setup and authentication setup tutorial.\n\n Find the Log set up script [**here**](https://raw.githubusercontent.com/Azure/Azure-Sentinel/c1cb589dad1add228f78e629073a9b069ce52991/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPResourceManagerLogsSetup/GCPResourceManagerLogSetup.tf)\n & the Authentication set up script [**here**](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov)""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the Terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", ""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Enable Resource Manager logs \n In the Google Cloud Console, enable cloud resource manager API if not enabled previously, and save the changes. Make sure to have organization level IAM permissions for your account to see all logs in the resource hierarchy. You can refer the document links for different IAM permissions for access control with IAM at each level provided in this [link](https://cloud.google.com/resource-manager/docs/how-to)""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \n To enable GCP Resource Manager Logs for Microsoft Sentinel, click on Add new collector button, provide the required information in the pop up and click on Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}], ""title"": ""Connect GCP Resource Manager to Microsoft Sentinel\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformResourceManager/Data%20Connectors/GCPResourceManagerAuditLogs_ccf/GCPResourceManagerAuditLogs_ConnectorDefinition.json","true" +"GCPCloudSQL","GoogleCloudPlatformSQL","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformSQL","azuresentinel","azure-sentinel-solution-gcpsql","2021-07-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","GCPCloudSQLCCFDefinition","Microsoft","GCP Cloud SQL (via Codeless Connector Framework)","The GCP Cloud SQL data connector provides the capability to ingest Audit logs into Microsoft Sentinel using the GCP Cloud SQL API. Refer to [GCP cloud SQL Audit Logs](https://cloud.google.com/sql/docs/mysql/audit-logging) documentation for more information.","[{""instructions"": [{""type"": ""MarkdownControlEnvBased"", ""parameters"": {""prodScript"": ""#### 1. 
Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformSQL/Data%20Connectors/Readme.md) for log setup and authentication setup tutorial.\n Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPCloudSQLLogsSetup/GCPCloudSQLLogsSetup.tf)\nAuthentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation/GCPInitialAuthenticationSetup)"", ""govScript"": ""#### 1. Setup the GCP environment \n Ensure to have the following resources from the GCP Console:\n Project ID, Project Name, GCP Subscription name for the project, Workload Identity Pool ID, Workspace Identity Provider ID, and a Service Account to establish the connection.\n For more information, refer the [Connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformSQL/Data%20Connectors/Readme.md) for log setup and authentication setup tutorial.\n Log set up script: [Click Here](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPCloudSQLLogsSetup/GCPCloudSQLLogsSetup.tf)\nAuthentication set up script: [Click here](https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/GCP/Terraform/sentinel_resources_creation_gov/GCPInitialAuthenticationSetupGov)""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Tenant ID: A unique identifier that is used as an input in the terraform configuration within a GCP environment."", ""fillWith"": [""TenantId""], ""name"": ""TenantId"", 
""disabled"": true}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. In the Google Cloud Console, enable Cloud SQL API, if not enabled previously, and save the changes.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. Connect new collectors \n To enable GCP Cloud SQL Logs for Microsoft Sentinel, click the Add new collector button, fill the required information in the context pane and click on Connect.""}}, {""type"": ""GCPGrid"", ""parameters"": {}}, {""type"": ""GCPContextPane"", ""parameters"": {}}], ""title"": ""Connect GCP Cloud SQL to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleCloudPlatformSQL/Data%20Connectors/GCPCloudSQLLog_CCF/GCPCloudSQLLog_ConnectorDefinition.json","true" +"","GoogleDirectory","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleDirectory","","","","","","","","","","","","","","","","","","false" +"GoogleWorkspaceReports","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceCCPDefinition","Microsoft","Google Workspace Activities (via Codeless Connector Framework)","The [Google Workspace](https://workspace.google.com/) Activities data connector provides the capability to ingest Activity Events from [Google Workspace API](https://developers.google.com/admin-sdk/reports/reference/rest/v1/activities/list) into Microsoft Sentinel.","[{""instructions"": [{""type"": ""Markdown"", 
""parameters"": {""content"": ""#### Configuration steps for the Google Reports API\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add https://portal.azure.com/TokenAuthorize/ExtensionName/Microsoft_Azure_Security_Insights as the Authorized redirect URIs.\n\t 4. Once you click Create, you will be provided with the Client ID and Client Secret. \n\tCopy these values and use them in the configuration steps below.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Configure steps for the Google Reports API oauth access. Then, provide the required information below and click on Connect.\n>""}}, {""description"": ""Configure steps for the Google Reports API oauth access. 
Then, provide the required information below and click on Connect.\n>"", ""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""name"", ""columnName"": ""Name""}, {""columnValue"": ""id"", ""columnName"": ""ID""}]}}], ""title"": ""Connect to Google Workspace to start collecting user activity logs into Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Google Workspace API access"", ""description"": ""Access to the Google Workspace activities API through Oauth are required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GoogleWorkspaceTemplate_ccp/GoogleWorkspaceReports_DataConnectorDefinition.json","true" +"GWorkspace_ReportsAPI_access_transparency_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. 
The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickle String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickle String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_admin_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create an OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select OAuth client ID.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickle String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_calendar_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create an OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select OAuth client ID.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickle String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_chat_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create an OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select OAuth client ID.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickle String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_chrome_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_context_aware_access_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_data_studio_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_drive_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated; consider moving to the CCF data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_gcp_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated; consider moving to the CCF data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_gplus_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated; consider moving to the CCF data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_groups_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_groups_enterprise_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_jamboard_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_keep_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated; consider moving to the CCF data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_login_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated; consider moving to the CCF data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_meet_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated; consider moving to the CCF data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_mobile_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickle String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create an OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select OAuth client ID.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickle String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_rules_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickle String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create an OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select OAuth client ID.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickle String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_saml_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickle String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create an OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select OAuth client ID.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickle String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_token_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated; consider moving to the CCF data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GWorkspace_ReportsAPI_user_accounts_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated; consider moving to the CCF data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"GoogleWorkspaceReports_CL","GoogleWorkspaceReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports","azuresentinel","azure-sentinel-solution-googleworkspacereports","2022-01-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","GoogleWorkspaceReportsAPI","Google","[DEPRECATED] Google Workspace (G Suite)","The [Google Workspace](https://workspace.google.com/) data connector provides the capability to ingest Google Workspace Activity events into Microsoft Sentinel through the REST API. The connector provides ability to get [events](https://developers.google.com/admin-sdk/reports/v1/reference/activities) which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems, track who signs in and when, analyze administrator activity, understand how users create and share content, and more review events in your org.

NOTE: This data connector has been deprecated; consider moving to the CCF data connector available in the solution, which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Google Reports API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias GWorkspaceReports and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Parsers/GWorkspaceActivityReports.yaml), on the second line of the query, enter the hostname(s) of your GWorkspaceReports device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Ensure the prerequisites to obtain the Google Pickel String**\n\n""}, {""title"": """", ""description"": ""1. [Python 3 or above](https://www.python.org/downloads/) is installed.\n2. The [pip package management tool](https://www.geeksforgeeks.org/download-and-install-pip-latest-version/) is available.\n3. A Google Workspace domain with [API access enabled](https://support.google.com/a/answer/7281227?visit_id=637889155425319296-3895555646&rd=1).\n4. 
A Google account in that domain with administrator privileges.""}, {""title"": """", ""description"": ""**STEP 2 - Configuration steps for the Google Reports API**\n\n1. Login to Google cloud console with your Workspace Admin credentials https://console.cloud.google.com.\n2. Using the search option (available at the top middle), Search for ***APIs & Services***\n3. From ***APIs & Services*** -> ***Enabled APIs & Services***, enable **Admin SDK API** for this project.\n 4. Go to ***APIs & Services*** -> ***OAuth Consent Screen***. If not already configured, create a OAuth Consent Screen with the following steps:\n\t 1. Provide App Name and other mandatory information.\n\t 2. Add authorized domains with API Access Enabled.\n\t 3. In Scopes section, add **Admin SDK API** scope.\n\t 4. In Test Users section, make sure the domain admin account is added.\n 5. Go to ***APIs & Services*** -> ***Credentials*** and create OAuth 2.0 Client ID\n\t 1. Click on Create Credentials on the top and select Oauth client Id.\n\t 2. Select Web Application from the Application Type drop down.\n\t 3. Provide a suitable name to the Web App and add http://localhost:8081/ as one of the Authorized redirect URIs.\n\t 4. Once you click Create, download the JSON from the pop-up that appears. Rename this file to \""**credentials.json**\"".\n 6. To fetch Google Pickel String, run the [python script](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/get_google_pickle_string.py) from the same folder where credentials.json is saved.\n\t 1. When popped up for sign-in, use the domain admin account credentials to login.\n>**Note:** This script is supported only on Windows operating system.\n 7. From the output of the previous step, copy Google Pickle String (contained within single quotation marks) and keep it handy. 
It will be needed on Function App deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Workspace GooglePickleString readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Google Workspace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelgworkspaceazuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **GooglePickleString** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Google Workspace data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-GWorkspaceReportsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. GWorkspaceXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tGooglePickleString\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n4. (Optional) Change the default delays if required. 
\n\n\t> **NOTE:** The following default values for ingestion delays have been added for different set of logs from Google Workspace based on Google [documentation](https://support.google.com/a/answer/7061566). These can be modified based on environmental requirements. \n\t\t Fetch Delay - 10 minutes \n\t\t Calendar Fetch Delay - 6 hours \n\t\t Chat Fetch Delay - 1 day \n\t\t User Accounts Fetch Delay - 3 hours \n\t\t Login Fetch Delay - 6 hours \n\n5. Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`. \n6. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**GooglePickleString** is required for REST API. 
[See the documentation to learn more about API](https://developers.google.com/admin-sdk/reports/v1/reference/activities). Please find the instructions to obtain the credentials in the configuration section below. You can check all [requirements and follow the instructions](https://developers.google.com/admin-sdk/reports/v1/quickstart/python) from here as well.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GoogleWorkspaceReports/Data%20Connectors/GWorkspaceReports_API_FunctionApp.json","true" +"ThreatIntelligenceIndicator","GreyNoiseThreatIntelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GreyNoiseThreatIntelligence","greynoiseintelligenceinc1681236078693","microsoft-sentinel-byol-greynoise","2023-09-05","2025-07-28","","GreyNoise","Partner","https://www.greynoise.io/contact/general","","domains","GreyNoise2SentinelAPI","GreyNoise, Inc. and BlueCycle LLC","GreyNoise Threat Intelligence","This Data Connector installs an Azure Function app to download GreyNoise indicators once per day and inserts them into the ThreatIntelligenceIndicator table in Microsoft Sentinel.","[{""title"": ""You can connect GreyNoise Threat Intelligence to Microsoft Sentinel by following the below steps: "", ""description"": ""\n> The following steps create an Azure AAD application, retrieves a GreyNoise API key, and saves the values in an Azure Function App Configuration.""}, {""title"": ""1. Retrieve your API Key from GreyNoise Visualizer."", ""description"": ""Generate an API key from GreyNoise Visualizer https://docs.greynoise.io/docs/using-the-greynoise-api""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID and Client ID. 
Also, get the Log Analytics Workspace ID associated with your Microsoft Sentinel instance (it should display below)."", ""description"": ""Follow the instructions here to create your Azure AAD app and save your Client ID and Tenant ID: https://learn.microsoft.com/en-us/azure/sentinel/connect-threat-intelligence-upload-api#instructions\n NOTE: Wait until step 5 to generate your client secret."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Follow the instructions here to add the Microsoft Sentinel Contributor Role: https://learn.microsoft.com/en-us/azure/sentinel/connect-threat-intelligence-upload-api#assign-a-role-to-the-application""}, {""title"": ""4. Specify the AAD permissions to enable MS Graph API access to the upload-indicators API."", ""description"": ""Follow this section here to add **'ThreatIndicators.ReadWrite.OwnedBy'** permission to the AAD App: https://learn.microsoft.com/en-us/azure/sentinel/connect-threat-intelligence-tip#specify-the-permissions-required-by-the-application. \n Back in your AAD App, ensure you grant admin consent for the permissions you just added. \n Finally, in the 'Tokens and APIs' section, generate a client secret and save it. You will need it in Step 6. ""}, {""title"": ""5. Deploy the Threat Intelligence (Preview) Solution, which includes the Threat Intelligence Upload Indicators API (Preview)"", ""description"": ""See Microsoft Sentinel Content Hub for this Solution, and install it in the Microsoft Sentinel instance.""}, {""title"": ""6. Deploy the Azure Function"", ""description"": ""Click the Deploy to Azure button.\n\n [![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-GreyNoise-azuredeploy)\n\n Fill in the appropriate values for each parameter. 
**Be aware** that the only valid values for the **GREYNOISE_CLASSIFICATIONS** parameter are **benign**, **malicious** and/or **unknown**, which must be comma-separated.""}, {""title"": ""7. Send indicators to Sentinel"", ""description"": ""The function app installed in Step 6 queries the GreyNoise GNQL API once per day, and submits each indicator found in STIX 2.1 format to the [Microsoft Upload Threat Intelligence Indicators API](https://learn.microsoft.com/en-us/azure/sentinel/upload-indicators-api). \n Each indicator expires in ~24 hours from creation unless found on the next day's query. In this case the TI Indicator's **Valid Until** time is extended for another 24 hours, which keeps it active in Microsoft Sentinel. \n\n For more information on the GreyNoise API and the GreyNoise Query Language (GNQL), [click here](https://developer.greynoise.io/docs/using-the-greynoise-api).""}]","{""resourceProvider"": [{""provider"": ""Microsoft.SecurityInsights/threatintelligence/write"", ""permissionsDisplayText"": ""write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""GreyNoise API Key"", ""description"": ""Retrieve your GreyNoise API Key [here](https://viz.greynoise.io/account/api-key).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/GreyNoiseThreatIntelligence/Data%20Connectors/GreyNoiseConnector_UploadIndicatorsAPI.json","true" +"","Group-IB","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Group-IB","","","","","","","","","","","","","","","","","","false" +"","HIPAA Compliance","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/HIPAA%20Compliance","azuresentinel","azure-sentinel-solution-hipaacompliance","2025-10-08","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"","HYAS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/HYAS","hyas","a-hyas-insight-azure-sentinel-solutions-gallery","2021-10-20","","","HYAS","Partner","https://www.hyas.com/contact","","domains","","","","","","","","false" +"HYASProtectDnsSecurityLogs_CL","HYAS Protect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/HYAS%20Protect","hyas","microsoft-sentinel-solution-hyas-protect","2023-09-26","","","HYAS","Partner","https://www.hyas.com/contact","","domains","HYASProtect","HYAS","HYAS Protect","HYAS Protect provide logs based on reputation values - Blocked, Malicious, Permitted, Suspicious.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the HYAS API to pull Logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the HYAS Protect data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-HYASProtect-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Function Name**, **Table Name**, **Workspace ID**, **Workspace Key**, **API Key**, **TimeInterval**, **FetchBlockedDomains**, **FetchMaliciousDomains**, **FetchSuspiciousDomains**, **FetchPermittedDomains** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the HYAS Protect Logs data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. 
Deploy a Function App**\n\n> NOTE:You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-HYASProtect-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. HyasProtectLogsXXX).\n\n\te. **Select a runtime:** Choose Python 3.8.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. 
In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAPIKey\n\t\tPolling\n\t\tWorkspaceID\n\t\tWorkspaceKey\n. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**HYAS API Key** is required for making API calls.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/HYAS%20Protect/Data%20Connectors/HYASProtect_FunctionApp.json","true" +"net_assets_CL","HolmSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/HolmSecurity","holmsecurityswedenab1639511288603","holmsecurity_sc_sentinel","2022-07-18","","","Holm Security","Partner","https://support.holmsecurity.com/","","domains","HolmSecurityAssets","Holm Security","Holm Security Asset Data","The connector provides the capability to poll data from Holm Security Center into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Holm Security Assets to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Holm Security API**\n\n [Follow these instructions](https://support.holmsecurity.com/knowledge/how-do-i-set-up-an-api-token) to create an API authentication token.""}, {""title"": """", ""description"": ""**STEP 2 - Use the below deployment option to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Holm Security connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Holm Security API authorization Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template Deployment"", ""description"": ""**Option 1 - Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the Holm Security connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-holmsecurityassets-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password**, 'and/or Other required fields'. \n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. 
\n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Holm Security API Token"", ""description"": ""Holm Security API Token is required. [Holm Security API Token](https://support.holmsecurity.com/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/HolmSecurity/Data%20Connectors/HolmSecurityAssets_API_FunctionApp.json","true" +"web_assets_CL","HolmSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/HolmSecurity","holmsecurityswedenab1639511288603","holmsecurity_sc_sentinel","2022-07-18","","","Holm Security","Partner","https://support.holmsecurity.com/","","domains","HolmSecurityAssets","Holm Security","Holm Security Asset Data","The connector provides the capability to poll data from Holm Security Center into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Holm Security Assets to pull its logs into Microsoft Sentinel. 
This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Holm Security API**\n\n [Follow these instructions](https://support.holmsecurity.com/knowledge/how-do-i-set-up-an-api-token) to create an API authentication token.""}, {""title"": """", ""description"": ""**STEP 2 - Use the below deployment option to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Holm Security connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Holm Security API authorization Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template Deployment"", ""description"": ""**Option 1 - Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the Holm Security connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-holmsecurityassets-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password**, 'and/or Other required fields'. 
\n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Holm Security API Token"", ""description"": ""Holm Security API Token is required. 
[Holm Security API Token](https://support.holmsecurity.com/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/HolmSecurity/Data%20Connectors/HolmSecurityAssets_API_FunctionApp.json","true" +"","HoneyTokens","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/HoneyTokens","","","","","","","","","","","","","","","","","","false" +"CyberpionActionItems_CL","IONIX","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IONIX","cyberpion1597832716616","cyberpion_mss","2022-05-02","","","IONIX","Partner","https://www.ionix.io/contact-us/","","domains","CyberpionSecurityLogs","IONIX","IONIX Security Logs","The IONIX Security Logs data connector, ingests logs from the IONIX system directly into Sentinel. The connector allows users to visualize their data, create alerts and incidents and improve security investigations.","[{""title"": """", ""description"": ""Follow the [instructions](https://www.ionix.io/integrations/azure-sentinel/) to integrate IONIX Security Alerts into Sentinel."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""IONIX Subscription"", ""description"": ""a subscription and account is required for IONIX logs. [One can be acquired here.](https://azuremarketplace.microsoft.com/en/marketplace/apps/cyberpion1597832716616.cyberpion)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IONIX/Data%20Connectors/IONIXSecurityLogs.json","true" +"","IPQualityScore","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPQualityScore","ipqualityscorellc1632794263588","ipqs_1","2021-10-20","","","IPQS Plugins Team","Partner","https://www.ipqualityscore.com/contact-us","","domains","","","","","","","","false" +"Ipinfo_ASN_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoASNDataConnector","IPinfo","IPinfo ASN Data Connector","This IPinfo data connector installs an Azure Function app to download standard_ASN datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. 
Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-ASN-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-ASN-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. 
After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/ASN/IPinfo_ASN_API_AzureFunctionApp.json","true" +"Ipinfo_Abuse_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoAbuseDataConnector","IPinfo","IPinfo Abuse Data Connector","This IPinfo data connector installs an Azure Function app to download standard_abuse datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-Abuse-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-Abuse-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/Abuse/IPinfo_Abuse_API_AzureFunctionApp.json","true" +"Ipinfo_Carrier_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoCarrierDataConnector","IPinfo","IPinfo Carrier Data Connector","This IPinfo data connector installs an Azure Function app to download standard_carrier datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-Carrier-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-Carrier-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/Carrier/IPinfo_Carrier_API_AzureFunctionApp.json","true" +"Ipinfo_Company_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoCompanyDataConnector","IPinfo","IPinfo Company Data Connector","This IPinfo data connector installs an Azure Function app to download standard_company datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-Company-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-Company-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/Company/IPinfo_Company_API_AzureFunctionApp.json","true" +"Ipinfo_Country_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoCountryDataConnector","IPinfo","IPinfo Country ASN Data Connector","This IPinfo data connector installs an Azure Function app to download country_asn datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-Country-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-Country-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/Country%20ASN/IPinfo_Country_API_AzureFunctionApp.json","true" +"Ipinfo_Domain_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoDomainDataConnector","IPinfo","IPinfo Domain Data Connector","This IPinfo data connector installs an Azure Function app to download standard_domain datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-Domain-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-Domain-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/Domain/IPinfo_Domain_API_AzureFunctionApp.json","true" +"Ipinfo_Location_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoIplocationDataConnector","IPinfo","IPinfo Iplocation Data Connector","This IPinfo data connector installs an Azure Function app to download standard_location datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-Iplocation-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-Iplocation-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/Iplocation/IPinfo_Iplocation_API_AzureFunctionApp.json","true" +"Ipinfo_Location_extended_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoIplocationExtendedDataConnector","IPinfo","IPinfo Iplocation Extended Data Connector","This IPinfo data connector installs an Azure Function app to download standard_location_extended datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-Iplocation-Extended-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-Iplocation-Extended-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/Iplocation%20Extended/IPinfo_Iplocation_Extended_API_AzureFunctionApp.json","true" +"Ipinfo_Privacy_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoPrivacyDataConnector","IPinfo","IPinfo Privacy Data Connector","This IPinfo data connector installs an Azure Function app to download standard_privacy datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-Privacy-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-Privacy-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/Privacy/IPinfo_Privacy_API_AzureFunctionApp.json","true" +"Ipinfo_Privacy_extended_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoPrivacyExtendedDataConnector","IPinfo","IPinfo Privacy Extended Data Connector","This IPinfo data connector installs an Azure Function app to download standard_privacy datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-Privacy-Extended-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-Privacy-Extended-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/Privacy%20Extended/IPinfo_Privacy_Extended_API_AzureFunctionApp.json","true" +"Ipinfo_RIRWHOIS_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoRIRWHOISDataConnector","IPinfo","IPinfo RIRWHOIS Data Connector","This IPinfo data connector installs an Azure Function app to download RIRWHOIS datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-RIRWHOIS-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-RIRWHOIS-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/RIRWHOIS/IPinfo_RIRWHOIS_API_AzureFunctionApp.json","true" +"Ipinfo_RWHOIS_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoRWHOISDataConnector","IPinfo","IPinfo RWHOIS Data Connector","This IPinfo data connector installs an Azure Function app to download RWHOIS datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-RWHOIS-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-RWHOIS-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/RWHOIS/IPinfo_RWHOIS_API_AzureFunctionApp.json","true" +"Ipinfo_WHOIS_ASN_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoWHOISASNDataConnector","IPinfo","IPinfo WHOIS ASN Data Connector","This IPinfo data connector installs an Azure Function app to download WHOIS_ASN datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-WHOIS-ASN-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-WHOIS-ASN-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/WHOIS%20ASN/IPinfo_WHOIS_ASN_API_AzureFunctionApp.json","true" +"Ipinfo_WHOIS_MNT_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoWHOISMNTDataConnector","IPinfo","IPinfo WHOIS MNT Data Connector","This IPinfo data connector installs an Azure Function app to download WHOIS_MNT datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-WHOIS-MNT-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-WHOIS-MNT-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/WHOIS%20MNT/IPinfo_WHOIS_MNT_API_AzureFunctionApp.json","true" +"Ipinfo_WHOIS_NET_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoWHOISNETDataConnector","IPinfo","IPinfo WHOIS NET Data Connector","This IPinfo data connector installs an Azure Function app to download WHOIS_NET datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-WHOIS-NET-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-WHOIS-NET-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/WHOIS%20NET/IPinfo_WHOIS_NET_API_AzureFunctionApp.json","true" +"Ipinfo_WHOIS_ORG_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoWHOISORGDataConnector","IPinfo","IPinfo WHOIS ORG Data Connector","This IPinfo data connector installs an Azure Function app to download WHOIS_ORG datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-WHOIS-ORG-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-WHOIS-ORG-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/WHOIS%20ORG/IPinfo_WHOIS_ORG_API_AzureFunctionApp.json","true" +"Ipinfo_WHOIS_POC_CL","IPinfo","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo","idbllc1687537942583","microsoft-sentinel-solution-ipinfo-ipintelligence","2024-05-02","","","IPinfo","Partner","https://www.ipinfo.io/","","domains","IPinfoWHOISPOCDataConnector","IPinfo","IPinfo WHOIS POC Data Connector","This IPinfo data connector installs an Azure Function app to download WHOIS_POC datasets and insert it into custom log table in Microsoft Sentinel","[{""title"": ""1. Retrieve API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}, {""title"": ""2. In your Azure AD tenant, create an Azure Active Directory (AAD) application"", ""description"": ""In your Azure AD tenant, create an Azure Active Directory (AAD) application and acquire Tenant ID, Client ID, and Client Secret: Use this Link.""}, {""title"": ""3. Assign the AAD application the Microsoft Sentinel Contributor Role."", ""description"": ""Assign the AAD application you just created to the Contributor(Privileged administrator roles) and Monitoring Metrics Publisher(Job function roles) in the same \u201cResource Group\u201d you use for \u201cLog Analytic Workspace\u201d on which \u201cMicrosoft Sentinel\u201d is added: Use this Link.""}, {""title"": ""4. Get Workspace Resource ID"", ""description"": ""Use the Log Analytic Workspace -> Properties blade having the 'Resource ID' property value. 
This is a fully qualified resourceId which is in the format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'""}, {""title"": ""5. Deploy the Azure Function"", ""description"": ""Use this for automated deployment of the IPinfo data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IPinfo-WHOIS-POC-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **RESOURCE_ID**, **IPINFO_TOKEN**, **TENANT_ID**, **CLIENT_ID**, **CLIENT_SECRET**.""}, {""title"": ""Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the IPinfo data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the Azure Function App file. Extract the archive to your local development computer [Azure Function App](https://aka.ms/sentinel-Ipinfo-WHOIS-POC-functionapp). \n2. Create Function App using Hosting Functions Premium or App service plan using advanced option using VSCode. \n3. Follow the function app manual deployment instructions to deploy the Azure Functions app using VSCode. \n4. After successful deployment of the function app, follow the next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Settings** -> **Configuration** or **Environment variables**. \n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive):\n\t\tRESOURCE_ID\n\t\tIPINFO_TOKEN\n\t\tTENANT_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tRETENTION_IN_DAYS\n\t\tTOTAL_RETENTION_IN_DAYS\n\t\tSCHEDULE\n\t\tLOCATION \n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""IPinfo API Token"", ""description"": ""Retrieve your IPinfo API Token [here](https://ipinfo.io/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IPinfo/Data%20Connectors/WHOIS%20POC/IPinfo_WHOIS_POC_API_AzureFunctionApp.json","true" +"Syslog","ISC Bind","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ISC%20Bind","azuresentinel","azure-sentinel-solution-iscbind","2022-09-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ISCBind","ISC","[Deprecated] ISC Bind","The [ISC Bind](https://www.isc.org/bind/) connector allows you to easily connect your ISC Bind logs with Microsoft Sentinel. This gives you more insight into your organization's network traffic data, DNS query data, traffic statistics and improves your security operation capabilities.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias ISCBind and load the function code or click [here](https://aka.ms/sentinel-iscbind-parser).The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n 2. Select **Apply below configuration to my machines** and select the facilities and severities.\n 3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Configure and connect the ISC Bind"", ""description"": ""1. Follow these instructions to configure the ISC Bind to forward syslog: \n - [DNS Logs](https://kb.isc.org/docs/aa-01526) \n2. Configure Syslog to send the Syslog traffic to Agent. 
Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""ISC Bind"", ""description"": ""must be configured to export logs via Syslog""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ISC%20Bind/Data%20Connectors/Connector_Syslog_ISCBind.json","true" +"CommonSecurityLog","Illumio Core","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illumio%20Core","azuresentinel","azure-sentinel-solution-illumiocore","2022-05-26","","","Microsoft","Microsoft","https://support.microsoft.com","","domains","IllumioCore","Illumio","[Deprecated] Illumio Core via Legacy Agent","The [Illumio Core](https://www.illumio.com/products/) data connector provides the capability to ingest Illumio Core logs into Microsoft Sentinel.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias IllumioCoreEvent and load the function code or click [here](https://aka.ms/sentinel-IllumioCore-parser).The function usually takes 10-15 minutes to activate after solution installation/update and maps Illumio Core events to Microsoft Sentinel Information Model (ASIM).""}, {""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Configure Ilumio Core to send logs using CEF"", ""description"": ""2.1 Configure Event Format\n\n 1. From the PCE web console menu, choose **Settings > Event Settings** to view your current settings.\n\n 2. Click **Edit** to change the settings.\n\n 3. Set **Event Format** to CEF.\n\n 4. (Optional) Configure **Event Severity** and **Retention Period**.\n\n2.2 Configure event forwarding to an external syslog server\n\n 1. From the PCE web console menu, choose **Settings > Event Settings**.\n\n 2. Click **Add**.\n\n 3. 
Click **Add Repository**.\n\n 4. Complete the **Add Repository** dialog.\n\n 5. Click **OK** to save the event forwarding configuration.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illumio%20Core/Data%20Connectors/Connector_IllumioCore_CEF.json","true" +"CommonSecurityLog","Illumio Core","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illumio%20Core","azuresentinel","azure-sentinel-solution-illumiocore","2022-05-26","","","Microsoft","Microsoft","https://support.microsoft.com","","domains","IllumioCoreAma","Illumio","[Deprecated] Illumio Core via AMA","The [Illumio Core](https://www.illumio.com/products/) data connector provides the capability to ingest Illumio Core logs into Microsoft Sentinel.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias IllumioCoreEvent and load the function code or click [here](https://aka.ms/sentinel-IllumioCore-parser).The function usually takes 10-15 minutes to activate after solution installation/update and maps Illumio Core events to Microsoft Sentinel Information Model (ASIM)."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine.""}, {""title"": ""Step B. Configure Ilumio Core to send logs using CEF"", ""description"": ""Configure Event Format\n\n 1. From the PCE web console menu, choose **Settings > Event Settings** to view your current settings.\n\n 2. Click **Edit** to change the settings.\n\n 3. Set **Event Format** to CEF.\n\n 4. (Optional) Configure **Event Severity** and **Retention Period**.\n\nConfigure event forwarding to an external syslog server\n\n 1. From the PCE web console menu, choose **Settings > Event Settings**.\n\n 2. Click **Add**.\n\n 3. Click **Add Repository**.\n\n 4. Complete the **Add Repository** dialog.\n\n 5. Click **OK** to save the event forwarding configuration.""}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illumio%20Core/Data%20Connectors/template_IllumioCoreAMA.json","true" +"IllumioInsights_CL","Illumio Insight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illumio%20Insight","illumioinc1629822633689","azure-sentinel-solution-illumioinsight","2025-08-10","","","Illumio","Partner","https://www.illumio.com/support/support","","domains","IllumioInsightsDefinition","Microsoft","Illumio Insights","Illumio Insights Connector sends workload and security graph data from Illumio Insights into the Azure Microsoft Sentinel Data Lake, providing deep context for threat detection, lateral movement analysis, and real-time investigation.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Illumio Insights Connector\n\n**Prerequisites**\n- Register and Login to Illumio Console with valid credentials\n- Purchase Illumio Insights or Start a free Trial for Illumio Insights\n\n**Step 1: Register the Service Account**\n1. Go to **Illumio Console \u2192 Access \u2192 Service Accounts**\n2. Create a service account for the tenant\n3. Once you create a service account, you will receive the client credentials\n4. 
Copy the **auth_username** (Illumio Insights API Key) and the **Secret** (API Secret)\n\n**Step 2: Add Client Credentials to Sentinel Account**\n- Add the API key and secret to Sentinel Account for tenant authentication\n- These credentials will be used to authenticate calls to the Illumio SaaS API\n\nPlease fill in the required fields below with the credentials obtained from the Illumio Console:""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Illumio Insights Api Key"", ""placeholder"": ""api_XXXXXX"", ""type"": ""password"", ""name"": ""apiKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Api Secret"", ""placeholder"": ""API Secret"", ""type"": ""password"", ""name"": ""apiToken""}}, {""parameters"": {""label"": ""Illumio Tenant Id"", ""placeholder"": ""{IllumioTenantId - Optional}"", ""type"": ""text"", ""name"": ""illumioTenantId""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illumio%20Insight/Data%20Connectors/IllumioInsight_CCP/IllumioInsight_Definition.json","true" +"IllumioInsightsSummary_CL","Illumio Insight","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illumio%20Insight","illumioinc1629822633689","azure-sentinel-solution-illumioinsight","2025-08-10","","","Illumio","Partner","https://www.illumio.com/support/support","","domains","IllumioInsightsSummaryCCP","Illumio","Illumio Insights Summary","The Illumio Insights Summary connector Publishes AI-powered threat discovery and anomaly reports generated by the Illumio Insights Agent. 
Leveraging the MITRE ATT&CK framework, these reports surface high-fidelity insights into emerging threats and risky behaviors, directly into the Data Lake.","[{""title"": ""1. Configuration"", ""description"": ""Configure the Illumio Insights Summary connector."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""> This data connector may take 24 hrs for the latest report after onboarding""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Illumio Insights Summary Connector\n\n**Prerequisites**\n- Register and Login to Illumio Console with valid credentials\n- Purchase Illumio Insights or Start a free Trial for Illumio Insights\n- Enable The Illumio Insights Agent\n\n**Step 1: Register the Service Account**\n1. Go to **Illumio Console \u2192 Access \u2192 Service Accounts**\n2. Create a service account for the tenant\n3. Once you create a service account, you will receive the client credentials\n4. Copy the **auth_username** (Illumio Insights API Key) and the **Secret** (API Secret)\n\n**Step 2: Add Client Credentials to Sentinel Account**\n- Add the API key and secret to Sentinel Account for tenant authentication\n- These credentials will be used to authenticate calls to the Illumio SaaS API \n\nPlease fill in the required fields below with the credentials obtained from the Illumio Console:""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Illumio Insights Api Key"", ""placeholder"": ""api_XXXXXX"", ""type"": ""password"", ""name"": ""apiKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Api Secret"", ""placeholder"": ""API Secret"", ""type"": ""password"", ""name"": ""apiToken""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Illumio Tenant ID"", ""placeholder"": ""{IllumioTenantId - Optional}"", ""type"": ""text"", ""name"": ""illumioTenantId""}}]}, {""title"": ""2. 
Connect"", ""description"": ""Enable the Illumio Insights Summary connector."", ""instructions"": [{""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""disconnectLabel"": ""Disconnect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illumio%20Insight/Data%20Connectors/IllumioInsightsSummaryConnector_CCP/IllumioInsightsSummary_ConnectorDefinition.json","true" +"Illumio_Auditable_Events_CL","IllumioSaaS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IllumioSaaS","illumioinc1629822633689","illumio_sentinel","2024-05-13","","","Illumio","Partner","https://www.illumio.com/support/support","","domains","IllumioSaaSDataConnector","Illumio","Illumio SaaS","[Illumio](https://www.illumio.com/) connector provides the capability to ingest events into Microsoft Sentinel. The connector provides ability to ingest auditable and flow events from AWS S3 bucket.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. 
Ensure AWS SQS is configured for the s3 bucket from which flow and auditable event logs are going to be pulled. In case, Illumio provides bucket, please contact Illumio support for sqs url, s3 bucket name and aws credentials. \n 2. Register AAD application - For DCR (Data collection rule) to authentiate to ingest data into log analytics, you must use Entra application. 1. [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. \n 2. Ensure you have created a log analytics workspace. \nPlease keep note of the name and region where it has been deployed.""}, {""title"": ""Deployment"", ""description"": ""Choose one of the approaches from below options. Either use the below ARM template to deploy azure resources or deploy function app manually.""}, {""title"": ""1. Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of Azure resources using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IllumioSaaS-FunctionApp) \t\t\t\n2. Provide the required details such as Microsoft Sentinel Workspace, AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""2. Deploy additional function apps to handle scale"", ""description"": ""Use this method for automated deployment of additional function apps using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IllumioSaaS-QueueTriggerFunctionApp) \t\t\t\n""}, {""title"": ""3. Manual Deployment of Azure Functions"", ""description"": ""Deployment via Visual Studio Code.""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n1. Download the [Azure Function App](https://github.com/Azure/Azure-Sentinel/raw/master/Solutions/IllumioSaaS/Data%20Connectors/IllumioEventsConn.zip) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. Follow documentation to set up all required environment variables and click **Save**. Ensure you restart the function app once settings are saved.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](). If you are using s3 bucket provided by Illumio, contact Illumio support. At your request they will provide you with the AWS S3 bucket name, AWS SQS url and AWS credentials to access them.""}, {""name"": ""Illumio API key and secret"", ""description"": ""**ILLUMIO_API_KEY**, **ILLUMIO_API_SECRET** is required for a workbook to make connection to SaaS PCE and fetch api responses.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IllumioSaaS/Data%20Connectors/IllumioSaaS_FunctionApp.json","true" +"Illumio_Flow_Events_CL","IllumioSaaS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IllumioSaaS","illumioinc1629822633689","illumio_sentinel","2024-05-13","","","Illumio","Partner","https://www.illumio.com/support/support","","domains","IllumioSaaSDataConnector","Illumio","Illumio SaaS","[Illumio](https://www.illumio.com/) connector provides the capability to ingest events into Microsoft Sentinel. The connector provides ability to ingest auditable and flow events from AWS S3 bucket.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the AWS SQS / S3 to pull logs into Microsoft Sentinel. 
This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\n\n>**(Optional Step)** Securely store API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Prerequisites"", ""description"": ""1. Ensure AWS SQS is configured for the s3 bucket from which flow and auditable event logs are going to be pulled. In case, Illumio provides bucket, please contact Illumio support for sqs url, s3 bucket name and aws credentials. \n 2. Register AAD application - For DCR (Data collection rule) to authentiate to ingest data into log analytics, you must use Entra application. 1. [Follow the instructions here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application) (steps 1-5) to get **AAD Tenant Id**, **AAD Client Id** and **AAD Client Secret**. \n 2. Ensure you have created a log analytics workspace. \nPlease keep note of the name and region where it has been deployed.""}, {""title"": ""Deployment"", ""description"": ""Choose one of the approaches from below options. Either use the below ARM template to deploy azure resources or deploy function app manually.""}, {""title"": ""1. Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of Azure resources using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IllumioSaaS-FunctionApp) \t\t\t\n2. 
Provide the required details such as Microsoft Sentinel Workspace, AWS credentials, Azure AD Application details and ingestion configurations \n> **NOTE:** It is recommended to create a new Resource Group for deployment of function app and associated resources.\n3. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n4. Click **Purchase** to deploy.""}, {""title"": ""2. Deploy additional function apps to handle scale"", ""description"": ""Use this method for automated deployment of additional function apps using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-IllumioSaaS-QueueTriggerFunctionApp) \t\t\t\n""}, {""title"": ""3. Manual Deployment of Azure Functions"", ""description"": ""Deployment via Visual Studio Code.""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n1. Download the [Azure Function App](https://github.com/Azure/Azure-Sentinel/raw/master/Solutions/IllumioSaaS/Data%20Connectors/IllumioEventsConn.zip) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. Follow documentation to set up all required environment variables and click **Save**. 
Ensure you restart the function app once settings are saved.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SQS and AWS S3 account credentials/permissions"", ""description"": ""**AWS_SECRET**, **AWS_REGION_NAME**, **AWS_KEY**, **QUEUE_URL** is required. [See the documentation to learn more about data pulling](). If you are using s3 bucket provided by Illumio, contact Illumio support. 
At your request they will provide you with the AWS S3 bucket name, AWS SQS url and AWS credentials to access them.""}, {""name"": ""Illumio API key and secret"", ""description"": ""**ILLUMIO_API_KEY**, **ILLUMIO_API_SECRET** is required for a workbook to make connection to SaaS PCE and fetch api responses.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IllumioSaaS/Data%20Connectors/IllumioSaaS_FunctionApp.json","true" +"IllumioFlowEventsV2_CL","IllumioSaaS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IllumioSaaS","illumioinc1629822633689","illumio_sentinel","2024-05-13","","","Illumio","Partner","https://www.illumio.com/support/support","","domains","IllumioSaasCCFDefinition","Microsoft","Illumio Saas","The Illumio Saas Cloud data connector provides the capability to ingest Flow logs into Microsoft Sentinel using the Illumio Saas Log Integration through AWS S3 Bucket. Refer to [Illumio Saas Log Integration](https://product-docs-repo.illumio.com/Tech-Docs/CloudSecure/out/en/administer-cloudsecure/connector.html#UUID-c14edaab-9726-1f23-9c4c-bc2937be39ee_section-idm234556433515698) for more information.","[{""title"": ""Connect Illumio Saas to Microsoft Sentinel\n\n"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** This connector fetches the Illumio Saas Flow logs from AWS S3 bucket""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Illumio, you need to configure the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. AWS Role ARN \n To gather data from Illumio, you'll need AWS Role ARN.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
AWS SQS Queue URL \n To gather data from Illumio, you'll need AWS SQS Queue URL.\n\n""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""For detailed steps to retrieve the AWS Role ARN, SQS Queue URL, and configure Illumio log forwarding to the Amazon S3 bucket, refer to the [Connector Setup Guide](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IllumioSaaS/Data%20Connectors/IllumioSaasLogs_ccf/Readme.md).""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""AWS Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""AWS SQS Queue URL""}, {""columnValue"": ""properties.destinationTable"", ""columnName"": ""Table Name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""placeholder"": ""Enter Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Flow Log Queue URL"", ""placeholder"": ""Enter Flow log SQL Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""required"": true}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IllumioSaaS/Data%20Connectors/IllumioSaasLogs_ccf/IllumioSaasLogs_ConnectorDefinition.json","true" +"","Illusive Active 
Defense","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illusive%20Active%20Defense","","","","","","","","","","","","","","","","","","false" +"CommonSecurityLog","Illusive Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illusive%20Platform","illusivenetworks","illusive_platform_mss","2022-05-25","","","Illusive Networks","Partner","https://illusive.com/support","","domains","illusiveAttackManagementSystem","illusive","[Deprecated] Illusive Platform via Legacy Agent","The Illusive Platform Connector allows you to share Illusive's attack surface analysis data and incident logs with Microsoft Sentinel and view this information in dedicated dashboards that offer insight into your organization's attack surface risk (ASM Dashboard) and track unauthorized lateral movement in your organization's network (ADS Dashboard).","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. 
You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Illusive Common Event Format (CEF) logs to Syslog agent"", ""description"": ""1. Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address.\n> 2. Log onto the Illusive Console, and navigate to Settings->Reporting.\n> 3. Find Syslog Servers\n> 4. Supply the following information:\n>> 1. Host name: Linux Syslog agent IP address or FQDN host name\n>> 2. Port: 514\n>> 3. Protocol: TCP\n>> 4. Audit messages: Send audit messages to server\n> 5. To add the syslog server, click Add.\n> 6. For more information about how to add a new syslog server in the Illusive platform, please find the Illusive Networks Admin Guide in here: https://support.illusivenetworks.com/hc/en-us/sections/360002292119-Documentation-by-Version""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illusive%20Platform/Data%20Connectors/illusive%20Attack%20Management%20System.json","true" +"CommonSecurityLog","Illusive Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illusive%20Platform","illusivenetworks","illusive_platform_mss","2022-05-25","","","Illusive Networks","Partner","https://illusive.com/support","","domains","illusiveAttackManagementSystemAma","illusive","[Deprecated] Illusive Platform via AMA","The Illusive Platform Connector allows you to share Illusive's attack surface analysis data and incident logs with Microsoft Sentinel and view this information in dedicated dashboards that offer insight into your organization's attack surface risk (ASM Dashboard) and track unauthorized lateral movement in your organization's network (ADS Dashboard).","[{""title"": """", ""description"": """", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine""}, {""title"": ""Step B. Forward Illusive Common Event Format (CEF) logs to Syslog agent"", ""description"": ""1. Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address.\n> 2. Log onto the Illusive Console, and navigate to Settings->Reporting.\n> 3. Find Syslog Servers\n> 4. Supply the following information:\n>> 1. Host name: Linux Syslog agent IP address or FQDN host name\n>> 2. Port: 514\n>> 3. Protocol: TCP\n>> 4. Audit messages: Send audit messages to server\n> 5. To add the syslog server, click Add.\n> 6. For more information about how to add a new syslog server in the Illusive platform, please find the Illusive Networks Admin Guide in here: https://support.illusivenetworks.com/hc/en-us/sections/360002292119-Documentation-by-Version""}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Illusive%20Platform/Data%20Connectors/template_IllusivePlatformAMA.json","true" +"","Images","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Images","","","","","","","","","","","","","","","","","","false" +"CommonSecurityLog","Imperva WAF Gateway","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Imperva%20WAF%20Gateway","imperva","Imperva_WAF_Gateway_MSS","2022-05-02","","","Imperva","Partner","https://www.imperva.com/support/technical-support/","","domains","ImpervaWAFGateway","Imperva","Imperva WAF Gateway","The [Imperva](https://www.imperva.com) connector will allow you to quickly connect your Imperva WAF Gateway alerts to Azure Sentinel. This provides you additional insight into your organization's WAF traffic and improves your security operation capabilities.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Azure Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Azure Sentinel will use as the proxy between your security solution and Azure Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Azure Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""3. SecureSphere MX Configuration"", ""description"": ""This connector requires an Action Interface and Action Set to be created on the Imperva SecureSphere MX. 
[Follow the steps](https://community.imperva.com/blogs/craig-burlingame1/2020/11/13/steps-for-enabling-imperva-waf-gateway-alert) to create the requirements."", ""innerSteps"": [{""title"": ""3.1 Create the Action Interface"", ""description"": ""Create a new Action Interface that contains the required parameters to send WAF alerts to Azure Sentinel.""}, {""title"": ""3.2 Create the Action Set "", ""description"": ""Create a new Action Set that uses the Action Interface configured.""}, {""title"": ""3.3 Apply the Action Set"", ""description"": ""Apply the Action Set to any Security Policies you wish to have alerts for sent to Azure Sentinel.""}]}, {""title"": ""4. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n"", ""innerSteps"": [{""title"": ""4.1 Check for logs in the past 5 minutes using the following command.\n\nCommonSecurityLog | where DeviceVendor == \""Imperva Inc.\"" | where DeviceProduct == \""WAF Gateway\"" | where TimeGenerated >= ago(5m)""}]}, {""title"": ""5. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Imperva%20WAF%20Gateway/Data%20Connectors/Connector_Imperva_WAF_Gateway.json","true" +"ImpervaWAFCloudV2_CL","ImpervaCloudWAF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ImpervaCloudWAF","azuresentinel","azure-sentinel-solution-impervawafcloud","2021-09-28","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ImpervaCloudWAFLogsCCFDefinition","Microsoft","Imperva Cloud WAF","The Imperva WAF Cloud data connector provides the capability to ingest logs into Microsoft Sentinel using the Imperva Log Integration through AWS S3 Bucket. Refer to [Imperva WAF Cloud Log Integration](https://docs.imperva.com/bundle/cloud-application-security/page/settings/log-integration.htm) for more information.","[{""title"": ""Connect Imperva WAF Cloud to Microsoft Sentinel\n\n"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** This connector fetches the Imperva Cloud WAF logs from AWS S3 bucket""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Imperva, you need to configure the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. AWS Role ARN \n To gather data from Imperva, you'll need AWS Role ARN.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
AWS SQS Queue URL \n To gather data from Imperva, you'll need AWS SQS Queue URL.\n\n""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""For detailed steps to retrieve the AWS Role ARN, SQS Queue URL, and configure Imperva log forwarding to the Amazon S3 bucket, refer to the [Connector Setup Guide](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ImpervaCloudWAF/Data%20Connectors/Readme.md).""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""AWS Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""AWS SQS Queue URL""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""placeholder"": ""Enter Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""placeholder"": ""Enter SQS Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""required"": true}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ImpervaCloudWAF/Data%20Connectors/ImpervaCloudWAFLogs_ccf/ImpervaCloudWAFLogs_ConnectorDefinition.json","true" +"ImpervaWAFCloud_CL","ImpervaCloudWAF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ImpervaCloudWAF","azuresentinel","azure-sentinel-solution-impervawafcloud","2021-09-28","","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com","","domains","ImpervaWAFCloudAPI","Imperva","Imperva Cloud WAF","The [Imperva Cloud WAF](https://www.imperva.com/resources/resource-library/datasheets/imperva-cloud-waf/) data connector provides the capability to integrate and ingest Web Application Firewall events into Microsoft Sentinel through the REST API. Refer to Log integration [documentation](https://docs.imperva.com/bundle/cloud-application-security/page/settings/log-integration.htm#Download) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Imperva Cloud API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""description"": "">**NOTE:**This data connector depends on a parser based on a Kusto Function to work as expected [**ImpervaWAFCloud**](https://aka.ms/sentinel-impervawafcloud-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Log Integration**\n\n [Follow the instructions](https://docs.imperva.com/bundle/cloud-application-security/page/settings/log-integration.htm#Setuplogintegration) to obtain the credentials. 
\n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Imperva Cloud WAF data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-impervawafcloud-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **ImpervaAPIID**, **ImpervaAPIKey**, **ImpervaLogServerURI** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Imperva Cloud WAF data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure functions development.\n\n1. Download the [Azure Functions App](https://aka.ms/sentinel-impervawafcloud-functionapp) file. 
Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ImpervaCloudXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tImpervaAPIID\n\t\tImpervaAPIKey\n\t\tImpervaLogServerURI\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://<CustomerId>.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**ImpervaAPIID**, **ImpervaAPIKey**, **ImpervaLogServerURI** are required for the API. [See the documentation to learn more about Setup Log Integration process](https://docs.imperva.com/bundle/cloud-application-security/page/settings/log-integration.htm#Setuplogintegration). 
Check all [requirements and follow the instructions](https://docs.imperva.com/bundle/cloud-application-security/page/settings/log-integration.htm#Setuplogintegration) for obtaining credentials. Please note that this connector uses CEF log event format. [More information](https://docs.imperva.com/bundle/cloud-application-security/page/more/log-file-structure.htm#Logfilestructure) about log format.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ImpervaCloudWAF/Data%20Connectors/ImpervaWAFCloud_FunctionApp.json","true" +"Failed_Range_To_Ingest_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. 
This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" +"Infoblox_Failed_Indicators_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" +"dossier_atp_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" +"dossier_atp_threat_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" +"dossier_dns_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" +"dossier_geo_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" +"dossier_infoblox_web_cat_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" +"dossier_inforank_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" +"dossier_malware_analysis_v3_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" +"dossier_nameserver_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" +"dossier_nameserver_matches_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" +"dossier_ptr_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" +"dossier_rpz_feeds_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" +"dossier_rpz_feeds_records_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" +"dossier_threat_actor_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" +"dossier_tld_risk_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" +"dossier_whitelist_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" +"dossier_whois_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxDataConnector","Infoblox","Infoblox Data Connector via REST API","The Infoblox Data Connector allows you to easily connect your Infoblox TIDE data and Dossier data with Microsoft Sentinel. By connecting your data to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Infoblox API to create Threat Indicators for TIDE and pull Dossier data into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. 
Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to generate the Infoblox API Credentials**\n\n Follow these instructions to generate Infoblox API Key.\n In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. 
You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": """", ""description"": ""**STEP 5 - Steps to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Infoblox data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Infoblox API Authorization Credentials"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Infoblox Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-infoblox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tAzure Tenant Id \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tInfoblox API Token \n\t\tInfoblox Base URL \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tLog Level (Default: INFO) \n\t\tConfidence \n\t\tThreat Level \n\t\tApp Insights Workspace Resource ID \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Infoblox API Key** is required. 
See the documentation to learn more about API on the [Rest API reference](https://csp.infoblox.com/apidoc?url=https://csp.infoblox.com/apidoc/docs/Infrastructure#/Services/ServicesRead)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxCloudDataConnector/Infoblox_API_FunctionApp.json","true" +"CommonSecurityLog","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxSOCInsightsDataConnector_AMA","Infoblox","[Recommended] Infoblox SOC Insight Data Connector via AMA","The Infoblox SOC Insight Data Connector allows you to easily connect your Infoblox BloxOne SOC Insight data with Microsoft Sentinel. By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.

This data connector ingests Infoblox SOC Insight CDC logs into your Log Analytics Workspace using the new Azure Monitor Agent. Learn more about ingesting using the new Azure Monitor Agent [here](https://learn.microsoft.com/azure/sentinel/connect-cef-ama). **Microsoft recommends using this Data Connector.**","[{""title"": ""Workspace Keys"", ""description"": ""In order to use the playbooks as part of this solution, find your **Workspace ID** and **Workspace Primary Key** below for your convenience."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Workspace Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Parsers"", ""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected called [**InfobloxCDC_SOCInsights**](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights/Parsers/InfobloxCDC_SOCInsights.yaml) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": ""SOC Insights"", ""description"": "">This data connector assumes you have access to Infoblox BloxOne Threat Defense SOC Insights. You can find more information about SOC Insights [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/501514252/SOC+Insights).""}, {""title"": ""Infoblox Cloud Data Connector"", ""description"": "">This data connector assumes an Infoblox Data Connector host has already been created and configured in the Infoblox Cloud Services Portal (CSP). As the [**Infoblox Data Connector**](https://docs.infoblox.com/display/BloxOneThreatDefense/Deploying+the+Data+Connector+Solution) is a feature of BloxOne Threat Defense, access to an appropriate BloxOne Threat Defense subscription is required. 
See this [**quick-start guide**](https://www.infoblox.com/wp-content/uploads/infoblox-deployment-guide-data-connector.pdf) for more information and licensing requirements."", ""instructions"": [{""parameters"": {""title"": ""Follow the steps below to configure this data connector"", ""instructionSteps"": [{""title"": ""A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note: CEF logs are collected only from Linux Agents_\n\n1. Navigate to your **Microsoft Sentinel workspace > Data connectors** blade.\n\n2. Search for the **Common Event Format (CEF) via AMA** data connector and open it.\n\n3. Ensure there is no existing DCR configured to collect required facility of logs as it may cause log duplication. Create a new **DCR (Data Collection Rule)**.\n\n\t_Note: It is recommended to install the AMA agent v1.27 at minimum. [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication._\n\n4. Run the command provided in the **Common Event Format (CEF) via AMA** data connector page to configure the CEF collector on the machine.""}, {""title"": ""B. Within the Infoblox Cloud Services Portal, configure Infoblox BloxOne to send CEF Syslog data to the Infoblox Cloud Data Connector to forward to the Syslog agent"", ""description"": ""Follow the steps below to configure the Infoblox CDC to send BloxOne data to Microsoft Sentinel via the Linux Syslog agent.\n1. Navigate to **Manage > Data Connector**.\n2. Click the **Destination Configuration** tab at the top.\n3. Click **Create > Syslog**. 
\n - **Name**: Give the new Destination a meaningful **name**, such as **Microsoft-Sentinel-Destination**.\n - **Description**: Optionally give it a meaningful **description**.\n - **State**: Set the state to **Enabled**.\n - **Format**: Set the format to **CEF**.\n - **FQDN/IP**: Enter the IP address of the Linux device on which the Linux agent is installed.\n - **Port**: Leave the port number at **514**.\n - **Protocol**: Select desired protocol and CA certificate if applicable.\n - Click **Save & Close**.\n4. Click the **Traffic Flow Configuration** tab at the top.\n5. Click **Create**.\n - **Name**: Give the new Traffic Flow a meaningful **name**, such as **Microsoft-Sentinel-Flow**.\n - **Description**: Optionally give it a meaningful **description**. \n - **State**: Set the state to **Enabled**. \n - Expand the **Service Instance** section. \n - **Service Instance**: Select your desired Service Instance for which the Data Connector service is enabled. \n - Expand the **Source Configuration** section. \n - **Source**: Select **BloxOne Cloud Source**. \n - Select the **Internal Notifications** Log Type.\n - Expand the **Destination Configuration** section. \n - Select the **Destination** you just created. \n - Click **Save & Close**. \n6. Allow the configuration some time to activate.""}, {""title"": ""C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python --version\n\n2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed. 
[Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxSOCInsights/InfobloxSOCInsightsDataConnector_AMA.json","true" +"InfobloxInsight_CL","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxSOCInsightsDataConnector_API","Infoblox","Infoblox SOC Insight Data Connector via REST API","The Infoblox SOC Insight Data Connector allows you to easily connect your Infoblox BloxOne SOC Insight data with Microsoft Sentinel. By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": ""Workspace Keys"", ""description"": ""In order to use the playbooks as part of this solution, find your **Workspace ID** and **Workspace Primary Key** below for your convenience."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Workspace Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Parsers"", ""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected called [**InfobloxInsight**](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights/Parsers/InfobloxInsight.yaml) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": ""SOC Insights"", ""description"": "">This data connector assumes you have access to Infoblox BloxOne Threat Defense SOC Insights. 
You can find more information about SOC Insights [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/501514252/SOC+Insights).""}, {""title"": ""Follow the steps below to configure this data connector"", ""description"": """", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""1. Generate an Infoblox API Key and copy it somewhere safe"", ""description"": ""In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F).""}, {""title"": ""2. Configure the Infoblox-SOC-Get-Open-Insights-API playbook"", ""description"": ""Create and configure the **Infoblox-SOC-Get-Open-Insights-API** playbook which is deployed with this solution. Enter your Infoblox API key in the appropriate parameter when prompted.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxSOCInsights/InfobloxSOCInsightsDataConnector_API.json","true" +"CommonSecurityLog","Infoblox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox","infoblox","infoblox-app-for-microsoft-sentinel","2024-07-15","2024-07-15","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxSOCInsightsDataConnector_Legacy","Infoblox","[Deprecated] Infoblox SOC Insight Data Connector via Legacy Agent","The Infoblox SOC Insight Data Connector allows you to easily connect your Infoblox BloxOne SOC Insight data with Microsoft Sentinel. By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.

This data connector ingests Infoblox SOC Insight CDC logs into your Log Analytics Workspace using the legacy Log Analytics agent.

**Microsoft recommends installation of Infoblox SOC Insight Data Connector via AMA Connector.** The legacy connector uses the Log Analytics agent which is about to be deprecated by **Aug 31, 2024,** and should only be installed where AMA is not supported.

Using MMA and AMA on the same machine can cause log duplication and extra ingestion cost. [More details](https://learn.microsoft.com/en-us/azure/sentinel/ama-migrate).","[{""title"": ""Workspace Keys"", ""description"": ""In order to use the playbooks as part of this solution, find your **Workspace ID** and **Workspace Primary Key** below for your convenience."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Workspace Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Parsers"", ""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected called [**InfobloxCDC_SOCInsights**](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights/Parsers/InfobloxCDC_SOCInsights.yaml) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""SOC Insights"", ""description"": "">This data connector assumes you have access to Infoblox BloxOne Threat Defense SOC Insights. You can find more information about SOC Insights [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/501514252/SOC+Insights). "", ""instructions"": []}, {""title"": ""Infoblox Cloud Data Connector"", ""description"": "">This data connector assumes an Infoblox Data Connector host has already been created and configured in the Infoblox Cloud Services Portal (CSP). As the [**Infoblox Data Connector**](https://docs.infoblox.com/display/BloxOneThreatDefense/Deploying+the+Data+Connector+Solution) is a feature of BloxOne Threat Defense, access to an appropriate BloxOne Threat Defense subscription is required. See this [**quick-start guide**](https://www.infoblox.com/wp-content/uploads/infoblox-deployment-guide-data-connector.pdf) for more information and licensing requirements."", ""instructions"": []}, {""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Within the Infoblox Cloud Services Portal, configure Infoblox BloxOne to send CEF Syslog data to the Infoblox Cloud Data Connector to forward to the Syslog agent"", ""description"": ""Follow the steps below to configure the Infoblox CDC to send BloxOne data to Microsoft Sentinel via the Linux Syslog agent.\n1. Navigate to **Manage > Data Connector**.\n2. Click the **Destination Configuration** tab at the top.\n3. Click **Create > Syslog**. 
\n - **Name**: Give the new Destination a meaningful **name**, such as **Microsoft-Sentinel-Destination**.\n - **Description**: Optionally give it a meaningful **description**.\n - **State**: Set the state to **Enabled**.\n - **Format**: Set the format to **CEF**.\n - **FQDN/IP**: Enter the IP address of the Linux device on which the Linux agent is installed.\n - **Port**: Leave the port number at **514**.\n - **Protocol**: Select desired protocol and CA certificate if applicable.\n - Click **Save & Close**.\n4. Click the **Traffic Flow Configuration** tab at the top.\n5. Click **Create**.\n - **Name**: Give the new Traffic Flow a meaningful **name**, such as **Microsoft-Sentinel-Flow**.\n - **Description**: Optionally give it a meaningful **description**. \n - **State**: Set the state to **Enabled**. \n - Expand the **Service Instance** section. \n - **Service Instance**: Select your desired Service Instance for which the Data Connector service is enabled. \n - Expand the **Source Configuration** section. \n - **Source**: Select **BloxOne Cloud Source**. \n - Select the **Internal Notifications** Log Type.\n - Expand the **Destination Configuration** section. \n - Select the **Destination** you just created. \n - Click **Save & Close**. \n6. Allow the configuration some time to activate.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox/Data%20Connectors/InfobloxSOCInsights/InfobloxSOCInsightsDataConnector_Legacy.json","true" +"CommonSecurityLog","Infoblox Cloud Data Connector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20Cloud%20Data%20Connector","infoblox","infoblox-cdc-solution","2021-10-20","","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxCloudDataConnector","Infoblox","[Deprecated] Infoblox Cloud Data Connector via Legacy Agent","The Infoblox Cloud Data Connector allows you to easily connect your Infoblox BloxOne data with Microsoft Sentinel. By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**IMPORTANT:** This data connector depends on a parser based on a Kusto Function to work as expected called [**InfobloxCDC**](https://aka.ms/sentinel-InfobloxCloudDataConnector-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** This Microsoft Sentinel data connector assumes an Infoblox Data Connector host has already been created and configured in the Infoblox Cloud Services Portal (CSP). As the [**Infoblox Data Connector**](https://docs.infoblox.com/display/BloxOneThreatDefense/Deploying+the+Data+Connector+Solution) is a feature of BloxOne Threat Defense, access to an appropriate BloxOne Threat Defense subscription is required. 
See this [**quick-start guide**](https://www.infoblox.com/wp-content/uploads/infoblox-deployment-guide-data-connector.pdf) for more information and licensing requirements."", ""instructions"": []}, {""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Configure Infoblox BloxOne to send Syslog data to the Infoblox Cloud Data Connector to forward to the Syslog agent"", ""description"": ""Follow the steps below to configure the Infoblox CDC to send BloxOne data to Microsoft Sentinel via the Linux Syslog agent.\n1. Navigate to **Manage > Data Connector**.\n2. 
Click the **Destination Configuration** tab at the top.\n3. Click **Create > Syslog**. \n - **Name**: Give the new Destination a meaningful **name**, such as **Microsoft-Sentinel-Destination**.\n - **Description**: Optionally give it a meaningful **description**.\n - **State**: Set the state to **Enabled**.\n - **Format**: Set the format to **CEF**.\n - **FQDN/IP**: Enter the IP address of the Linux device on which the Linux agent is installed.\n - **Port**: Leave the port number at **514**.\n - **Protocol**: Select desired protocol and CA certificate if applicable.\n - Click **Save & Close**.\n4. Click the **Traffic Flow Configuration** tab at the top.\n5. Click **Create**.\n - **Name**: Give the new Traffic Flow a meaningful **name**, such as **Microsoft-Sentinel-Flow**.\n - **Description**: Optionally give it a meaningful **description**. \n - **State**: Set the state to **Enabled**. \n - Expand the **Service Instance** section. \n - **Service Instance**: Select your desired Service Instance for which the Data Connector service is enabled. \n - Expand the **Source Configuration** section. \n - **Source**: Select **BloxOne Cloud Source**. \n - Select all desired **log types** you wish to collect. Currently supported log types are:\n - Threat Defense Query/Response Log\n - Threat Defense Threat Feeds Hits Log\n - DDI Query/Response Log\n - DDI DHCP Lease Log\n - Expand the **Destination Configuration** section. \n - Select the **Destination** you just created. \n - Click **Save & Close**. \n6. Allow the configuration some time to activate.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. 
Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20Cloud%20Data%20Connector/Data%20Connectors/InfobloxCloudDataConnector.json","true" +"CommonSecurityLog","Infoblox Cloud Data Connector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20Cloud%20Data%20Connector","infoblox","infoblox-cdc-solution","2021-10-20","","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxCloudDataConnectorAma","Infoblox","[Deprecated] Infoblox Cloud Data Connector via AMA","The Infoblox Cloud Data Connector allows you to easily connect your Infoblox BloxOne data with Microsoft Sentinel. By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": """", ""description"": "">**IMPORTANT:** This data connector depends on a parser based on a Kusto Function to work as expected called [**InfobloxCDC**](https://aka.ms/sentinel-InfobloxCloudDataConnector-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** This Microsoft Sentinel data connector assumes an Infoblox Data Connector host has already been created and configured in the Infoblox Cloud Services Portal (CSP). As the [**Infoblox Data Connector**](https://docs.infoblox.com/display/BloxOneThreatDefense/Deploying+the+Data+Connector+Solution) is a feature of BloxOne Threat Defense, access to an appropriate BloxOne Threat Defense subscription is required. 
See this [**quick-start guide**](https://www.infoblox.com/wp-content/uploads/infoblox-deployment-guide-data-connector.pdf) for more information and licensing requirements."", ""instructions"": [{""parameters"": {""title"": ""1. Follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note: CEF logs are collected only from Linux Agents_\n\n1. Navigate to your **Microsoft Sentinel workspace > Data connectors** blade.\n\n2. Search for the **Common Event Format (CEF) via AMA** data connector and open it.\n\n3. Ensure there is no existing DCR configured to collect required facility of logs as it may cause log duplication. Create a new **DCR (Data Collection Rule)**.\n\n\t_Note: It is recommended to install the AMA agent v1.27 at minimum. [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication._\n\n4. Run the command provided in the **CEF via AMA data connector** page to configure the CEF collector on the machine.""}, {""title"": ""Step B. Configure Infoblox BloxOne to send Syslog data to the Infoblox Cloud Data Connector to forward to the Syslog agent"", ""description"": ""Follow the steps below to configure the Infoblox CDC to send BloxOne data to Microsoft Sentinel via the Linux Syslog agent.\n1. Navigate to **Manage > Data Connector**.\n2. Click the **Destination Configuration** tab at the top.\n3. Click **Create > Syslog**. 
\n - **Name**: Give the new Destination a meaningful **name**, such as **Microsoft-Sentinel-Destination**.\n - **Description**: Optionally give it a meaningful **description**.\n - **State**: Set the state to **Enabled**.\n - **Format**: Set the format to **CEF**.\n - **FQDN/IP**: Enter the IP address of the Linux device on which the Linux agent is installed.\n - **Port**: Leave the port number at **514**.\n - **Protocol**: Select desired protocol and CA certificate if applicable.\n - Click **Save & Close**.\n4. Click the **Traffic Flow Configuration** tab at the top.\n5. Click **Create**.\n - **Name**: Give the new Traffic Flow a meaningful **name**, such as **Microsoft-Sentinel-Flow**.\n - **Description**: Optionally give it a meaningful **description**. \n - **State**: Set the state to **Enabled**. \n - Expand the **Service Instance** section. \n - **Service Instance**: Select your desired Service Instance for which the Data Connector service is enabled. \n - Expand the **Source Configuration** section. \n - **Source**: Select **BloxOne Cloud Source**. \n - Select all desired **log types** you wish to collect. Currently supported log types are:\n - Threat Defense Query/Response Log\n - Threat Defense Threat Feeds Hits Log\n - DDI Query/Response Log\n - DDI DHCP Lease Log\n - Expand the **Destination Configuration** section. \n - Select the **Destination** you just created. \n - Click **Save & Close**. \n6. Allow the configuration some time to activate.""}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20Cloud%20Data%20Connector/Data%20Connectors/template_InfobloxCloudDataConnectorAMA.json","true" +"Syslog","Infoblox NIOS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20NIOS","azuresentinel","azure-sentinel-solution-infobloxnios","2022-04-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","InfobloxNIOS","Infoblox","[Deprecated] Infoblox NIOS","The [Infoblox Network Identity Operating System (NIOS)](https://www.infoblox.com/glossary/network-identity-operating-system-nios/) connector allows you to easily connect your Infoblox NIOS logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Infoblox and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20NIOS/Parser/Infoblox.yaml), on the second line of the query, enter any unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n 2. Select **Apply below configuration to my machines** and select the facilities and severities.\n 3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Configure and connect the Infoblox NIOS"", ""description"": ""[Follow these instructions](https://www.infoblox.com/wp-content/uploads/infoblox-deployment-guide-slog-and-snmp-configuration-for-nios.pdf) to enable syslog forwarding of Infoblox NIOS Logs. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}, {""title"": ""4. Configure the Sentinel parser"", ""description"": ""Update the watchlist 'Sources_by_Source' with the hostname(s) of your Infoblox device(s). 
Set SourceType to 'InfobloxNIOS' and Source to the value of 'Computer' seen in the logs seen in Syslog table.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Infoblox NIOS"", ""description"": ""must be configured to export logs via Syslog""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20NIOS/Data%20Connectors/Connector_Syslog_Infoblox.json","true" +"CommonSecurityLog","Infoblox SOC Insights","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights","infoblox","infoblox-soc-insight-solution","2024-03-06","","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxSOCInsightsDataConnector_AMA","Infoblox","[Deprecated] Infoblox SOC Insight Data Connector via AMA","The Infoblox SOC Insight Data Connector allows you to easily connect your Infoblox BloxOne SOC Insight data with Microsoft Sentinel. By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.

This data connector ingests Infoblox SOC Insight CDC logs into your Log Analytics Workspace using the new Azure Monitor Agent. Learn more about ingesting using the new Azure Monitor Agent [here](https://learn.microsoft.com/azure/sentinel/connect-cef-ama). **Microsoft recommends using this Data Connector.**","[{""title"": ""Workspace Keys"", ""description"": ""In order to use the playbooks as part of this solution, find your **Workspace ID** and **Workspace Primary Key** below for your convenience."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Workspace Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Parsers"", ""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected called [**InfobloxCDC_SOCInsights**](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights/Parsers/InfobloxCDC_SOCInsights.yaml) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": ""SOC Insights"", ""description"": "">This data connector assumes you have access to Infoblox BloxOne Threat Defense SOC Insights. You can find more information about SOC Insights [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/501514252/SOC+Insights).""}, {""title"": ""Infoblox Cloud Data Connector"", ""description"": "">This data connector assumes an Infoblox Data Connector host has already been created and configured in the Infoblox Cloud Services Portal (CSP). As the [**Infoblox Data Connector**](https://docs.infoblox.com/display/BloxOneThreatDefense/Deploying+the+Data+Connector+Solution) is a feature of BloxOne Threat Defense, access to an appropriate BloxOne Threat Defense subscription is required. 
See this [**quick-start guide**](https://www.infoblox.com/wp-content/uploads/infoblox-deployment-guide-data-connector.pdf) for more information and licensing requirements."", ""instructions"": [{""parameters"": {""title"": ""Follow the steps below to configure this data connector"", ""instructionSteps"": [{""title"": ""A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note: CEF logs are collected only from Linux Agents_\n\n1. Navigate to your **Microsoft Sentinel workspace > Data connectors** blade.\n\n2. Search for the **Common Event Format (CEF) via AMA** data connector and open it.\n\n3. Ensure there is no existing DCR configured to collect required facility of logs as it may cause log duplication. Create a new **DCR (Data Collection Rule)**.\n\n\t_Note: It is recommended to install the AMA agent v1.27 at minimum. [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication._\n\n4. Run the command provided in the **Common Event Format (CEF) via AMA** data connector page to configure the CEF collector on the machine.""}, {""title"": ""B. Within the Infoblox Cloud Services Portal, configure Infoblox BloxOne to send CEF Syslog data to the Infoblox Cloud Data Connector to forward to the Syslog agent"", ""description"": ""Follow the steps below to configure the Infoblox CDC to send BloxOne data to Microsoft Sentinel via the Linux Syslog agent.\n1. Navigate to **Manage > Data Connector**.\n2. Click the **Destination Configuration** tab at the top.\n3. Click **Create > Syslog**. 
\n - **Name**: Give the new Destination a meaningful **name**, such as **Microsoft-Sentinel-Destination**.\n - **Description**: Optionally give it a meaningful **description**.\n - **State**: Set the state to **Enabled**.\n - **Format**: Set the format to **CEF**.\n - **FQDN/IP**: Enter the IP address of the Linux device on which the Linux agent is installed.\n - **Port**: Leave the port number at **514**.\n - **Protocol**: Select desired protocol and CA certificate if applicable.\n - Click **Save & Close**.\n4. Click the **Traffic Flow Configuration** tab at the top.\n5. Click **Create**.\n - **Name**: Give the new Traffic Flow a meaningful **name**, such as **Microsoft-Sentinel-Flow**.\n - **Description**: Optionally give it a meaningful **description**. \n - **State**: Set the state to **Enabled**. \n - Expand the **Service Instance** section. \n - **Service Instance**: Select your desired Service Instance for which the Data Connector service is enabled. \n - Expand the **Source Configuration** section. \n - **Source**: Select **BloxOne Cloud Source**. \n - Select the **Internal Notifications** Log Type.\n - Expand the **Destination Configuration** section. \n - Select the **Destination** you just created. \n - Click **Save & Close**. \n6. Allow the configuration some time to activate.""}, {""title"": ""C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed. 
[Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights/Data%20Connectors/InfobloxSOCInsightsDataConnector_AMA.json","true" +"InfobloxInsight_CL","Infoblox SOC Insights","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights","infoblox","infoblox-soc-insight-solution","2024-03-06","","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxSOCInsightsDataConnector_API","Infoblox","Infoblox SOC Insight Data Connector via REST API","The Infoblox SOC Insight Data Connector allows you to easily connect your Infoblox BloxOne SOC Insight data with Microsoft Sentinel. By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": ""Workspace Keys"", ""description"": ""In order to use the playbooks as part of this solution, find your **Workspace ID** and **Workspace Primary Key** below for your convenience."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Workspace Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Parsers"", ""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected called [**InfobloxInsight**](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights/Parsers/InfobloxInsight.yaml) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""SOC Insights"", ""description"": "">This data connector assumes you have access to Infoblox BloxOne Threat Defense SOC Insights. 
You can find more information about SOC Insights [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/501514252/SOC+Insights)."", ""instructions"": []}, {""title"": ""Follow the steps below to configure this data connector"", ""description"": """", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""1. Generate an Infoblox API Key and copy it somewhere safe"", ""description"": ""In the [Infoblox Cloud Services Portal](https://csp.infoblox.com/atlas/app/welcome), generate an API Key and copy it somewhere safe to use in the next step. You can find instructions on how to create API keys [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/230394187/How+Do+I+Create+an+API+Key%3F)."", ""instructions"": []}, {""title"": ""2. Configure the Infoblox-SOC-Get-Open-Insights-API playbook"", ""description"": ""Create and configure the **Infoblox-SOC-Get-Open-Insights-API** playbook which is deployed with this solution. Enter your Infoblox API key in the appropriate parameter when prompted."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights/Data%20Connectors/InfobloxSOCInsightsDataConnector_API.json","true" +"CommonSecurityLog","Infoblox SOC Insights","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights","infoblox","infoblox-soc-insight-solution","2024-03-06","","","Infoblox","Partner","https://support.infoblox.com/","","domains","InfobloxSOCInsightsDataConnector_Legacy","Infoblox","[Deprecated] Infoblox SOC Insight Data Connector via Legacy Agent","The Infoblox SOC Insight Data Connector allows you to easily connect your Infoblox BloxOne SOC Insight data with Microsoft Sentinel. By connecting your logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.

This data connector ingests Infoblox SOC Insight CDC logs into your Log Analytics Workspace using the legacy Log Analytics agent.

**Microsoft recommends installation of Infoblox SOC Insight Data Connector via AMA Connector.** The legacy connector uses the Log Analytics agent which is about to be deprecated by **Aug 31, 2024,** and should only be installed where AMA is not supported.

Using MMA and AMA on the same machine can cause log duplication and extra ingestion cost. [More details](https://learn.microsoft.com/en-us/azure/sentinel/ama-migrate).","[{""title"": ""Workspace Keys"", ""description"": ""In order to use the playbooks as part of this solution, find your **Workspace ID** and **Workspace Primary Key** below for your convenience."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Workspace Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Parsers"", ""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected called [**InfobloxCDC_SOCInsights**](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights/Parsers/InfobloxCDC_SOCInsights.yaml) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""SOC Insights"", ""description"": "">This data connector assumes you have access to Infoblox BloxOne Threat Defense SOC Insights. You can find more information about SOC Insights [**here**](https://docs.infoblox.com/space/BloxOneThreatDefense/501514252/SOC+Insights). "", ""instructions"": []}, {""title"": ""Infoblox Cloud Data Connector"", ""description"": "">This data connector assumes an Infoblox Data Connector host has already been created and configured in the Infoblox Cloud Services Portal (CSP). As the [**Infoblox Data Connector**](https://docs.infoblox.com/display/BloxOneThreatDefense/Deploying+the+Data+Connector+Solution) is a feature of BloxOne Threat Defense, access to an appropriate BloxOne Threat Defense subscription is required. See this [**quick-start guide**](https://www.infoblox.com/wp-content/uploads/infoblox-deployment-guide-data-connector.pdf) for more information and licensing requirements."", ""instructions"": []}, {""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Within the Infoblox Cloud Services Portal, configure Infoblox BloxOne to send CEF Syslog data to the Infoblox Cloud Data Connector to forward to the Syslog agent"", ""description"": ""Follow the steps below to configure the Infoblox CDC to send BloxOne data to Microsoft Sentinel via the Linux Syslog agent.\n1. Navigate to **Manage > Data Connector**.\n2. Click the **Destination Configuration** tab at the top.\n3. Click **Create > Syslog**. 
\n - **Name**: Give the new Destination a meaningful **name**, such as **Microsoft-Sentinel-Destination**.\n - **Description**: Optionally give it a meaningful **description**.\n - **State**: Set the state to **Enabled**.\n - **Format**: Set the format to **CEF**.\n - **FQDN/IP**: Enter the IP address of the Linux device on which the Linux agent is installed.\n - **Port**: Leave the port number at **514**.\n - **Protocol**: Select desired protocol and CA certificate if applicable.\n - Click **Save & Close**.\n4. Click the **Traffic Flow Configuration** tab at the top.\n5. Click **Create**.\n - **Name**: Give the new Traffic Flow a meaningful **name**, such as **Microsoft-Sentinel-Flow**.\n - **Description**: Optionally give it a meaningful **description**. \n - **State**: Set the state to **Enabled**. \n - Expand the **Service Instance** section. \n - **Service Instance**: Select your desired Service Instance for which the Data Connector service is enabled. \n - Expand the **Source Configuration** section. \n - **Source**: Select **BloxOne Cloud Source**. \n - Select the **Internal Notifications** Log Type.\n - Expand the **Destination Configuration** section. \n - Select the **Destination** you just created. \n - Click **Save & Close**. \n6. Allow the configuration some time to activate.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Infoblox%20SOC%20Insights/Data%20Connectors/InfobloxSOCInsightsDataConnector_Legacy.json","true" +"","InsightVM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/InsightVM","","","","","","","","","","","","","","","","","","false" +"atlassian_beacon_alerts_CL","Integration for Atlassian Beacon","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Integration%20for%20Atlassian%20Beacon","defendlimited1682894612656","microsoft-sentinel-solution-atlassian-beacon","2023-09-22","","","DEFEND Ltd.","Partner","https://www.defend.co.nz/","","domains","AtlassianBeaconAlerts","DEFEND Ltd.","Atlassian Beacon Alerts","Atlassian Beacon is a cloud product that is built for Intelligent threat detection across the Atlassian platforms (Jira, Confluence, and Atlassian Admin). This can help users detect, investigate and respond to risky user activity for the Atlassian suite of products. The solution is a custom data connector from DEFEND Ltd. that is used to visualize the alerts ingested from Atlassian Beacon to Microsoft Sentinel via a Logic App.","[{""description"": "">1. Navigate to the newly installed Logic App 'Atlassian Beacon Integration'\n\n>2. Navigate to 'Logic app designer'\n\n>3. Expand the 'When a HTTP request is received'\n\n>4. Copy the 'HTTP POST URL'"", ""title"": ""1. Microsoft Sentinel""}, {""description"": "">1. Login to Atlassian Beacon using an admin account\n\n>2. Navigate to 'SIEM forwarding' under SETTINGS\n\n> 3. Paste the copied URL from Logic App in the text box\n\n> 4. Click the 'Save' button"", ""title"": ""2. Atlassian Beacon""}, {""description"": "">1. 
Login to Atlassian Beacon using an admin account\n\n>2. Navigate to 'SIEM forwarding' under SETTINGS\n\n> 3. Click the 'Test' button right next to the newly configured webhook\n\n> 4. Navigate to Microsoft Sentinel\n\n> 5. Navigate to the newly installed Logic App\n\n> 6. Check for the Logic App Run under 'Runs history'\n\n> 7. Check for logs under the table name 'atlassian_beacon_alerts_CL' in 'Logs'\n\n> 8. If the analytic rule has been enabled, the above Test alert should have created an incident in Microsoft Sentinel"", ""title"": ""3. Testing and Validation""}]","{""resourceProvider"": [{""permissionsDisplayText"": ""read and write permissions are required."", ""provider"": ""Microsoft.OperationalInsights/workspaces"", ""providerDisplayName"": ""Workspace"", ""requiredPermissions"": {""delete"": true, ""read"": true, ""write"": true}, ""scope"": ""Workspace""}, {""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""providerDisplayName"": ""Keys"", ""requiredPermissions"": {""action"": true}, ""scope"": ""Workspace""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Integration%20for%20Atlassian%20Beacon/Data%20Connectors/AtlassianBeacon_DataConnector.json","true" +"","Intel471","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Intel471","intel471inc1641226539011","microsoft-sentinel-solution-intel471","2023-06-21","","","Intel 471","Partner","https://intel471.com/company/contact","","domains","","","","","","","","false" 
+"","IoTOTThreatMonitoringwithDefenderforIoT","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IoTOTThreatMonitoringwithDefenderforIoT","azuresentinel","azure-sentinel-solution-unifiedmicrosoftsocforot","2021-10-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","IronNet IronDefense","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/IronNet%20IronDefense","ironnetcybersecurity1585849518753","irondefense-for-sentinel","2021-10-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"Island_Admin_CL","Island","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Island","islandtechnologyinc1679434413850","island-sentinel-solution","2023-05-02","2023-07-20","","Island","Partner","https://www.island.io","","domains","Island_Admin_Polling","Island","Island Enterprise Browser Admin Audit (Polling CCP)","The [Island](https://www.island.io) Admin connector provides the capability to ingest Island Admin Audit logs into Microsoft Sentinel.","[{""title"": ""Connect Island to Microsoft Sentinel"", ""description"": ""Provide the Island API URL and Key. 
API URL is https://management.island.io/api/external/v1/adminActions for US or https://eu.management.island.io/api/external/v1/adminActions for EU.\n Generate the API Key in the Management Console under Settings > API."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""API URL"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{placeHolder1}}"", ""placeHolderValue"": """"}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true, ""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Island API Key"", ""description"": ""An Island API key is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Island/Data%20Connectors/IslandAdminAPIConnector.json","true" +"Island_User_CL","Island","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Island","islandtechnologyinc1679434413850","island-sentinel-solution","2023-05-02","2023-07-20","","Island","Partner","https://www.island.io","","domains","Island_User_Polling","Island","Island Enterprise Browser User Activity (Polling CCP)","The [Island](https://www.island.io) connector provides the capability to ingest Island User Activity logs into Microsoft Sentinel.","[{""title"": ""Connect Island to Microsoft Sentinel"", ""description"": ""Provide the Island API URL and Key. 
API URL is https://management.island.io/api/external/v1/timeline for US or https://eu.management.island.io/api/external/v1/timeline for EU.\n Generate the API Key in the Management Console under Settings > API."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""API URL"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{placeHolder1}}"", ""placeHolderValue"": """"}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true, ""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Island API Key"", ""description"": ""An Island API key is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Island/Data%20Connectors/IslandUserAPIConnector.json","true" +"Syslog","Ivanti Unified Endpoint Management","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Ivanti%20Unified%20Endpoint%20Management","azuresentinel","azure-sentinel-solution-ivantiuem","2022-07-05","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","IvantiUEM","Ivanti","[Deprecated] Ivanti Unified Endpoint Management","The [Ivanti Unified Endpoint Management](https://www.ivanti.com/products/unified-endpoint-manager) data connector provides the capability to ingest [Ivanti UEM Alerts](https://help.ivanti.com/ld/help/en_US/LDMS/11.0/Windows/alert-c-monitoring-overview.htm) into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**IvantiUEMEvent**](https://aka.ms/sentinel-ivantiuem-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", 
""description"": "">**NOTE:** This data connector has been developed using Ivanti Unified Endpoint Management Release 2021.1 Version 11.0.3.374"", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server where the Ivanti Unified Endpoint Management Alerts are forwarded.\n\n> Logs from Ivanti Unified Endpoint Management Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Configure Ivanti Unified Endpoint Management alert forwarding."", ""description"": ""[Follow the instructions](https://help.ivanti.com/ld/help/en_US/LDMS/11.0/Windows/alert-t-define-action.htm) to set up Alert Actions to send logs to syslog server.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Ivanti%20Unified%20Endpoint%20Management/Data%20Connectors/Ivanti_UEM_Syslog.json","true" +"JBossLogs_CL","JBoss","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/JBoss","azuresentinel","azure-sentinel-solution-jboss","2021-10-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","JBossEAP","Red Hat","[Deprecated] JBoss Enterprise Application Platform","The JBoss Enterprise Application Platform data connector provides the capability to ingest [JBoss](https://www.redhat.com/en/technologies/jboss-middleware/application-platform) events into Microsoft Sentinel. Refer to [Red Hat documentation](https://access.redhat.com/documentation/en-us/red_hat_jboss_enterprise_application_platform/7.0/html/configuration_guide/logging_with_jboss_eap) for more information.","[{""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**JBossEvent**](https://aka.ms/sentinel-jbosseap-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using JBoss Enterprise Application Platform 7.4.0."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the JBoss server where the logs are generated.\n\n> Logs from JBoss Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents.\n "", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the custom log directory to be collected"", ""instructions"": [{""parameters"": {""linkType"": ""OpenCustomLogsSettings""}, ""type"": ""InstallAgent""}]}, {""description"": ""1. 
Select the link above to open your workspace advanced settings \n2. Click **+Add custom**\n3. Click **Browse** to upload a sample of a JBoss log file (e.g. server.log). Then, click **Next >**\n4. Select **Timestamp** as the record delimiter and select Timestamp format **YYYY-MM-DD HH:MM:SS** from the dropdown list then click **Next >**\n5. Select **Windows** or **Linux** and enter the path to JBoss logs based on your configuration. Example:\n - **Linux** Directory:\n\n>Standalone server: EAP_HOME/standalone/log/server.log\n\n>Managed domain: EAP_HOME/domain/servers/SERVER_NAME/log/server.log\n\n6. After entering the path, click the '+' symbol to apply, then click **Next >** \n7. Add **JBossLogs** as the custom log Name and click **Done**""}, {""title"": ""3. Check logs in Microsoft Sentinel"", ""description"": ""Open Log Analytics to check if the logs are received using the JBossLogs_CL Custom log table.\n\n>**NOTE:** It may take up to 30 minutes before new logs will appear in JBossLogs_CL table."", ""instructions"": []}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/JBoss/Data%20Connectors/Connector_JBoss.json","true" +"jamfprotectalerts_CL","Jamf Protect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Jamf%20Protect","jamfsoftwareaustraliaptyltd1620360395539","jamf_protect","2022-10-10","2025-09-02","","Jamf Software, LLC","Partner","https://www.jamf.com/support/","","domains","JamfProtectPush","Jamf","Jamf Protect Push Connector","The [Jamf Protect](https://www.jamf.com/products/jamf-protect/) connector provides the capability to read raw event data from Jamf Protect in Microsoft Sentinel.","[{""title"": ""1. Create ARM Resources and Provide the Required Permissions"", ""description"": ""This connector reads data from the tables that Jamf Protect uses in a Microsoft Analytics Workspace, if the [data forwarding](https://docs.jamf.com/jamf-protect/documentation/Data_Forwarding_to_a_Third_Party_Storage_Solution.html?hl=sentinel#task-4227) option is enabled in Jamf Protect then raw event data is sent to the Microsoft Sentinel Ingestion API."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated Configuration and Secure Data Ingestion with Entra Application \nClicking on \""Deploy\"" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR). \nIt will then create an Entra application, link the DCR to it, and set the entered secret in the application. 
This setup enables data to be sent securely to the DCR using an Entra token.""}}, {""parameters"": {""label"": ""Deploy Jamf Protect connector resources"", ""applicationDisplayName"": ""Jamf Protect Connector Application""}, ""type"": ""DeployPushConnectorButton""}]}, {""title"": ""2. Push your logs into the workspace"", ""description"": ""Use the following parameters to configure the your machine to send the logs to the workspace."", ""instructions"": [{""parameters"": {""label"": ""Tenant ID (Directory ID)"", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the App Registration Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the App Registration Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Endpoint Uri"", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the Data Collection Endpoint Uri""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Rule Immutable ID"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the Data Collection Rule Immutable ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Unified Logs Stream Name"", ""value"": ""Custom-jamfprotectunifiedlogs""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Telemetry Stream Name"", ""value"": ""Custom-jamfprotecttelemetryv2""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Alerts Stream Name"", ""value"": ""Custom-jamfprotectalerts""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write 
permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rule (DCR). Typically requires Azure RBAC Owner or User Access Administrator role""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Jamf%20Protect/Data%20Connectors/JamfProtect_ccp/connectorDefinition.json","true" +"jamfprotecttelemetryv2_CL","Jamf Protect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Jamf%20Protect","jamfsoftwareaustraliaptyltd1620360395539","jamf_protect","2022-10-10","2025-09-02","","Jamf Software, LLC","Partner","https://www.jamf.com/support/","","domains","JamfProtectPush","Jamf","Jamf Protect Push Connector","The [Jamf Protect](https://www.jamf.com/products/jamf-protect/) connector provides the capability to read raw event data from Jamf Protect in Microsoft Sentinel.","[{""title"": ""1. Create ARM Resources and Provide the Required Permissions"", ""description"": ""This connector reads data from the tables that Jamf Protect uses in a Microsoft Analytics Workspace, if the [data forwarding](https://docs.jamf.com/jamf-protect/documentation/Data_Forwarding_to_a_Third_Party_Storage_Solution.html?hl=sentinel#task-4227) option is enabled in Jamf Protect then raw event data is sent to the Microsoft Sentinel Ingestion API."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated Configuration and Secure Data Ingestion with Entra Application \nClicking on \""Deploy\"" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR). 
\nIt will then create an Entra application, link the DCR to it, and set the entered secret in the application. This setup enables data to be sent securely to the DCR using an Entra token.""}}, {""parameters"": {""label"": ""Deploy Jamf Protect connector resources"", ""applicationDisplayName"": ""Jamf Protect Connector Application""}, ""type"": ""DeployPushConnectorButton""}]}, {""title"": ""2. Push your logs into the workspace"", ""description"": ""Use the following parameters to configure the your machine to send the logs to the workspace."", ""instructions"": [{""parameters"": {""label"": ""Tenant ID (Directory ID)"", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the App Registration Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the App Registration Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Endpoint Uri"", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the Data Collection Endpoint Uri""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Rule Immutable ID"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the Data Collection Rule Immutable ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Unified Logs Stream Name"", ""value"": ""Custom-jamfprotectunifiedlogs""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Telemetry Stream Name"", ""value"": ""Custom-jamfprotecttelemetryv2""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Alerts Stream Name"", ""value"": ""Custom-jamfprotectalerts""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": 
[{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rule (DCR). Typically requires Azure RBAC Owner or User Access Administrator role""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Jamf%20Protect/Data%20Connectors/JamfProtect_ccp/connectorDefinition.json","true" +"jamfprotectunifiedlogs_CL","Jamf Protect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Jamf%20Protect","jamfsoftwareaustraliaptyltd1620360395539","jamf_protect","2022-10-10","2025-09-02","","Jamf Software, LLC","Partner","https://www.jamf.com/support/","","domains","JamfProtectPush","Jamf","Jamf Protect Push Connector","The [Jamf Protect](https://www.jamf.com/products/jamf-protect/) connector provides the capability to read raw event data from Jamf Protect in Microsoft Sentinel.","[{""title"": ""1. 
Create ARM Resources and Provide the Required Permissions"", ""description"": ""This connector reads data from the tables that Jamf Protect uses in a Microsoft Analytics Workspace, if the [data forwarding](https://docs.jamf.com/jamf-protect/documentation/Data_Forwarding_to_a_Third_Party_Storage_Solution.html?hl=sentinel#task-4227) option is enabled in Jamf Protect then raw event data is sent to the Microsoft Sentinel Ingestion API."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated Configuration and Secure Data Ingestion with Entra Application \nClicking on \""Deploy\"" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR). \nIt will then create an Entra application, link the DCR to it, and set the entered secret in the application. This setup enables data to be sent securely to the DCR using an Entra token.""}}, {""parameters"": {""label"": ""Deploy Jamf Protect connector resources"", ""applicationDisplayName"": ""Jamf Protect Connector Application""}, ""type"": ""DeployPushConnectorButton""}]}, {""title"": ""2. 
Push your logs into the workspace"", ""description"": ""Use the following parameters to configure the your machine to send the logs to the workspace."", ""instructions"": [{""parameters"": {""label"": ""Tenant ID (Directory ID)"", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the App Registration Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the App Registration Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Endpoint Uri"", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the Data Collection Endpoint Uri""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Rule Immutable ID"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the Data Collection Rule Immutable ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Unified Logs Stream Name"", ""value"": ""Custom-jamfprotectunifiedlogs""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Telemetry Stream Name"", ""value"": ""Custom-jamfprotecttelemetryv2""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Alerts Stream Name"", ""value"": ""Custom-jamfprotectalerts""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in 
Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rule (DCR). Typically requires Azure RBAC Owner or User Access Administrator role""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Jamf%20Protect/Data%20Connectors/JamfProtect_ccp/connectorDefinition.json","true" +"","Joshua-Cyberiskvision","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Joshua-Cyberiskvision","almavivaspa1636563933762","joshua-cyberiskvision","2022-01-10","2022-01-10","","Joshua Cyberiskvision","Partner","https://www.cyberiskvision.com/","","domains","","","","","","","","false" +"Syslog","Juniper SRX","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Juniper%20SRX","azuresentinel","azure-sentinel-solution-junipersrx","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","JuniperSRX","Juniper","[Deprecated] Juniper SRX","The [Juniper SRX](https://www.juniper.net/us/en/products-services/security/srx-series/) connector allows you to easily connect your Juniper SRX logs with Microsoft Sentinel. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias JuniperSRX and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Juniper%20SRX/Parsers/JuniperSRX.txt), on the second line of the query, enter the hostname(s) of your JuniperSRX device(s) and any other unique identifiers for the logstream. 
The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n 2. Select **Apply below configuration to my machines** and select the facilities and severities.\n 3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Configure and connect the Juniper SRX"", ""description"": ""1. Follow these instructions to configure the Juniper SRX to forward syslog: \n - [Traffic Logs (Security Policy Logs)](https://kb.juniper.net/InfoCenter/index?page=content&id=KB16509&actp=METADATA) \n - [System Logs](https://kb.juniper.net/InfoCenter/index?page=content&id=kb16502)\n2. 
Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Juniper SRX"", ""description"": ""must be configured to export logs via Syslog""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Juniper%20SRX/Data%20Connectors/Connector_Syslog_JuniperSRX.json","true" +"JuniperIDP_CL","JuniperIDP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/JuniperIDP","azuresentinel","azure-sentinel-solution-juniperidp","2021-03-31","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","JuniperIDP","Juniper","[Deprecated] Juniper IDP","The [Juniper](https://www.juniper.net/) IDP data connector provides the capability to ingest [Juniper IDP](https://www.juniper.net/documentation/us/en/software/junos/idp-policy/topics/topic-map/security-idp-overview.html) events into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on Kusto Function to work as expected [**JuniperIDP**](https://aka.ms/sentinel-JuniperIDP-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** IDP OS 5.1 and above is supported by this data connector."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Follow the configuration steps below to get Juniper IDP logs into Microsoft Sentinel. This configuration enriches events generated by Juniper IDP module to provide visibility on log source information for Juniper IDP logs. Refer to the [Azure Monitor Documentation](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-json) for more details on these steps.\n1. 
Download config file [juniper_idp.conf](https://aka.ms/sentinel-JuniperIDP-conf).\n2. Login to the server where you have installed Azure Log Analytics agent.\n3. Copy juniper_idp.conf to the /etc/opt/microsoft/omsagent/**workspace_id**/conf/omsagent.d/ folder.\n4. Edit juniper_idp.conf as follows:\n\n\t i. change the listen port for receiving logs based on your configuration (line 3)\n\n\t ii. replace **workspace_id** with real value of your Workspace ID (lines 58,59,60,63)\n5. Save changes and restart the Azure Log Analytics agent for Linux service with the following command:\n\t\tsudo /opt/microsoft/omsagent/bin/service_control restart\n6. To configure a remote syslog destination, please reference the [SRX Getting Started - Configure System Logging](https://kb.juniper.net/InfoCenter/index?page=content&id=kb16502)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/JuniperIDP/Data%20Connectors/Connector_LogAnalytics_agent_JuniperIDP.json","true" +"","KQL Training","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/KQL%20Training","microsoftsentinelcommunity","azure-sentinel-solution-kqltraining","2022-11-30","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","","","","","","","","false" +"KeeperSecurityEventNewLogs_CL","Keeper Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Keeper%20Security","keepersecurity","keeper-security-integration","2025-06-03","2025-06-03","","Keeper Security","Partner","https://www.keepersecurity.com","","domains","KeeperSecurityPush2","Keeper Security","Keeper Security Push Connector","The [Keeper Security](https://keepersecurity.com) connector provides the capability to read raw event data from Keeper Security in Microsoft Sentinel.","[{""title"": ""1. Create ARM Resources and Provide the Required Permissions"", ""description"": ""This connector reads data from the tables that Keeper Security uses in a Microsoft Analytics Workspace, if the [data forwarding](https://docs.keepersecurity.com/docs/data-forwarding) option is enabled in Keeper Security then raw event data is sent to the Microsoft Sentinel Ingestion API."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated Configuration and Secure Data Ingestion with Entra Application \nClicking on \""Deploy\"" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR). \nIt will then create an Entra application, link the DCR to it, and set the entered secret in the application. 
This setup enables data to be sent securely to the DCR using an Entra token.""}}, {""parameters"": {""label"": ""Keeper Security connector resources"", ""applicationDisplayName"": ""Keeper Security Connector Application""}, ""type"": ""DeployPushConnectorButton""}]}, {""title"": ""2. Push your logs into the workspace"", ""description"": ""Use the following parameters to configure the your machine to send the logs to the workspace."", ""instructions"": [{""parameters"": {""label"": ""Tenant ID (Directory ID)"", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the App Registration Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the App Registration Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Endpoint Uri"", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the Data Collection Endpoint Uri""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Rule Immutable ID"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the Data Collection Rule Immutable ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Events Logs Stream Name"", ""value"": ""Custom-KeeperSecurityEventNewLogs""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Update Keeper Admin Console"", ""description"": ""Configure the Keeper Admin Console with the Azure connection details to enable data forwarding to Microsoft Sentinel."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configure Azure Monitor Logs in Keeper Admin Console\n\nIn the [Keeper Admin Console](https://keepersecurity.com/console/), login as the Keeper Administrator. Then go to **Reporting & Alerts** and select **Azure Monitor Logs**.\n\nProvide the following information from Step 2 above into the Admin Console:\n\n- **Azure Tenant ID**: You can find this from Azure's \""Subscriptions\"" area.\n- **Application (client) ID**: This is located in the App registration (KeeperLogging) overview screen\n- **Client Secret Value**: This is the Client Secret Value from the app registration secrets.\n- **Endpoint URL**: This is a URL that is created in the following specific format:\n `https:///dataCollectionRules//streams/
?api-version=2023-01-01`\n\nTo assemble the Endpoint URL:\n\n- **** This comes from Step 2 above\n- **** From the Data Collector Rule, copy the \""Immutable Id\"" value, e.g. `dcr-xxxxxxx`\n- **
** This is the table name created by Azure, e.g. `Custom-KeeperSecurityEventNewLogs`\n\nExample: `https:///dataCollectionRules//streams/Custom-KeeperSecurityEventNewLogs?api-version=2023-01-01`""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rule (DCR). Typically requires Azure RBAC Owner or User Access Administrator role""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Keeper%20Security/Data%20Connectors/KeeperSecurity_ccp/KepperSecurity_Definition.json","true" +"LastPassNativePoller_CL","LastPass","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/LastPass","thecollectiveconsultingbv1584980370320","lastpass-enterprise-monitoring-solution","2021-10-20","2022-01-12","","The Collective Consulting","Partner","https://thecollective.eu","","domains","LastPass_Polling","The Collective Consulting BV","LastPass Enterprise - Reporting (Polling CCP)","The [LastPass Enterprise](https://www.lastpass.com/products/enterprise-password-management-and-sso) connector provides the capability to LastPass reporting (audit) logs into Microsoft Sentinel. 
The connector provides visibility into logins and activity within LastPass (such as reading and removing passwords).","[{""title"": ""Connect LastPass Enterprise to Microsoft Sentinel"", ""description"": ""Provide the LastPass Provisioning API Key."", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""cid"", ""requestObjectKey"": ""queryParametersTemplate"", ""placeHolderName"": ""{{cidPlaceHolder}}""}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true, ""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""LastPass API Key and CID"", ""description"": ""A LastPass API key and CID are required. [See the documentation to learn more about LastPass API](https://support.logmeininc.com/lastpass/help/use-the-lastpass-provisioning-api-lp010068).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/LastPass/Data%20Connectors/LastPassAPIConnector.json","true" +"","Legacy IOC based Threat Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Legacy%20IOC%20based%20Threat%20Protection","azuresentinel","azure-sentinel-solution-ioclegacy","2022-12-19","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"Lookout_CL","Lookout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Lookout","lookoutinc","lookout_mtd_sentinel","2021-10-18","","","Lookout","Partner","https://www.lookout.com/support","","domains","LookoutAPI","Lookout","[DEPRECATED] Lookout","The [Lookout](https://lookout.com) data connector provides the capability to ingest 
[Lookout](https://enterprise.support.lookout.com/hc/en-us/articles/115002741773-Mobile-Risk-API-Guide#commoneventfields) events into Microsoft Sentinel through the Mobile Risk API. Refer to [API documentation](https://enterprise.support.lookout.com/hc/en-us/articles/115002741773-Mobile-Risk-API-Guide) for more information. The [Lookout](https://lookout.com) data connector provides ability to get events which helps to examine potential security risks and more.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This [Lookout](https://lookout.com) data connector uses Azure Functions to connect to the Mobile Risk API to pull its events into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**LookoutEvents**](https://aka.ms/sentinel-lookoutapi-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Mobile Risk API**\n\n [Follow the instructions](https://enterprise.support.lookout.com/hc/en-us/articles/115002741773-Mobile-Risk-API-Guide#authenticatingwiththemobileriskapi) to obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Follow below mentioned instructions to deploy the [Lookout](https://lookout.com) data connector and the associated Azure Function**\n\n>**IMPORTANT:** Before starting the deployment of the [Lookout](https://lookout.com) data connector, make sure to have the Workspace ID and Workspace Key ready (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Workspace Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Follow below steps for automated deployment of the [Lookout](https://lookout.com) data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-lookoutapi-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. 
\n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **Function Name**, **Workspace ID**,**Workspace Key**,**Enterprise Name** & **Api Key** and deploy. \n4. Click **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Mobile Risk API Credentials/permissions"", ""description"": ""**EnterpriseName** & **ApiKey** are required for Mobile Risk API. [See the documentation to learn more about API](https://enterprise.support.lookout.com/hc/en-us/articles/115002741773-Mobile-Risk-API-Guide). 
Check all [requirements and follow the instructions](https://enterprise.support.lookout.com/hc/en-us/articles/115002741773-Mobile-Risk-API-Guide#authenticatingwiththemobileriskapi) for obtaining credentials.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Lookout/Data%20Connectors/Lookout_API_FunctionApp.json","true" +"LookoutMtdV2_CL","Lookout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Lookout","lookoutinc","lookout_mtd_sentinel","2021-10-18","","","Lookout","Partner","https://www.lookout.com/support","","domains","LookoutStreaming_Definition","Microsoft","Lookout Mobile Threat Detection Connector (via Codeless Connector Framework) (Preview)","The [Lookout Mobile Threat Detection](https://lookout.com) data connector provides the capability to ingest events related to mobile security risks into Microsoft Sentinel through the Mobile Risk API. Refer to [API documentation](https://enterprise.support.lookout.com/hc/en-us/articles/115002741773-Mobile-Risk-API-Guide) for more information. This connector helps you examine potential security risks detected in mobile devices.","[{""title"": ""Connect Lookout Mobile Threat Defence connector to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""Before connecting to Lookout, ensure the following prerequisites are completed.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. **ApiKey** is required for Mobile Threat Detection API. See the [documentation](https://enterprise.support.lookout.com/hc/en-us/articles/115002741773-Mobile-Risk-API-Guide) to learn more about API. 
Check all requirements and follow the [instructions](https://enterprise.support.lookout.com/hc/en-us/articles/115002741773-Mobile-Risk-API-Guide#authenticatingwiththemobileriskapi) for obtaining credentials.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API key"", ""placeholder"": ""Enter your API key "", ""type"": ""password"", ""name"": ""applicationKey"", ""required"": true}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": false, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Lookout/Data%20Connectors/LookoutStreamingConnector_ccp/LookoutStreaming_DataConnectorDefinition.json","true" +"LookoutCloudSecurity_CL","Lookout Cloud Security Platform for Microsoft Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Lookout%20Cloud%20Security%20Platform%20for%20Microsoft%20Sentinel","lookoutinc","lookout_cloudsecurity_sentinel","2023-02-17","","","Lookout","Partner","https://www.lookout.com/support","","domains","LookoutCloudSecurityDataConnector","Lookout","Lookout Cloud Security for Microsoft Sentinel","This connector uses a Agari REST API connection to push data into Microsoft Sentinel Log Analytics.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Agari REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**Step-by-Step Instructions**\n\n As a prerequisite to this integration, first, you need to configure an API client on Lookout's Management Console. From the Management Console, you can add one or more clients and configure the appropriate permissions and actions for each. \n\n 1. Name - The name given to this client. \n\n 2. Client ID - the unique ID that was provided for this client. \n\n 3. Permissions - The permissions enabled for this client. The permissions you check are those that the client will be allowed to access. The listed options are Activity, Violation, Anomaly, Insights, and Profile \n\n 4. Service URL - The URL used to access this client.It must start with https:// \n\n 5. Authorized IPs - The valid IP address or addresses that apply to this client. \n\n 6. Actions - The actions you can take for this client. Click the icon for the action you want to perform. Editing client information, displaying the client secret, or deleting the client. \n\n **To add a new API client:** \n\n 1. Go to Administration > Enterprise Integration > API Clients and click New. \n\n 2. Enter a Name (required) and a Description (optional). \n\n 3. Enter the Client ID that was provided to you. \n\n 4. Select one or more Permissions from the dropdown list. \n\n 5. Enter one or more Authorized IP addresses for this client. Separate each address with a comma.\n\n 6. Click Save. \n\n When prompted, copy the string for the client's secret. 
You will need this information (along with the client ID) to authenticate to the API gateway. ""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Azure Blob Storage connection string and container name, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-LookoutCS-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Lookout Client ID**, **Lookout Client Secret**, **Lookout Base url**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key**\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-Lookout-functionapp) file. 
Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions.\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tLookoutClientId\n\t\tLookoutApiSecret\n\t\tBaseurl\n\t\tWorkspaceID\n\t\tPrimaryKey\n\t\tlogAnalyticsUri (Optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. \n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Lookout%20Cloud%20Security%20Platform%20for%20Microsoft%20Sentinel/Data%20Connectors/LookoutCSConnector/LookoutCloudSecurityConnector_API_FunctionApp.json","true" +"ThreatIntelIndicators","Lumen Defender Threat Feed","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Lumen%20Defender%20Threat%20Feed","centurylink","azure-sentinel-solution-lumen-defender-threat-feed","2025-09-12","2025-09-12","","Lumen Technologies, Inc.","Partner","https://www.lumen.com/en-us/contact-us/support.html","","domains","LumenThreatFeedConnector","Lumen Technologies, Inc.","Lumen Defender Threat Feed Data Connector","The [Lumen Defender Threat Feed](https://bll-analytics.mss.lumen.com/analytics) connector provides the capability to ingest STIX-formatted threat intelligence indicators from Lumen's Black Lotus Labs research team into Microsoft Sentinel. The connector automatically downloads and uploads daily threat intelligence indicators including IPv4 addresses and domains to the ThreatIntelIndicators table via the STIX Objects Upload API.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions with Durable Functions to connect to the Lumen Defender Threat Feed API and upload threat intelligence indicators to Microsoft Sentinel via the STIX Objects API. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": ""Configuration"", ""description"": ""**STEP 1 - Obtain Lumen Defender Threat Feed API Key**\n\n1. [Contact Lumen](mailto:DefenderThreatFeedSales@Lumen.com?subject=API%20Access%20Request) to obtain API access to our Threat Feed API service\n2. 
Obtain your API key for authentication.""}, {""title"": """", ""description"": ""**STEP 2 - Configure Azure Entra ID Application and gather information**\n\n1. Create an Entra application. [See the documentation for a guide to registering an application in Microsoft Entra ID.](https://learn.microsoft.com/en-us/entra/identity-platform/quickstart-register-app)\n2. Create a client secret and note the Application ID, Tenant ID, and Client Secret\n3. Assign the **Microsoft Sentinel Contributor** role to the application on your Microsoft Sentinel Log Analytics Workspace\n4. Make note of your Workspace ID, as well as the App Insights Workspace Resource ID, which can be obtained from the overview page of the Log Analytics Workspace for your Microsoft Sentinel instance. Click on the \u201cJSON View\u201d link in the top right and the Resource ID will be displayed at the top with a copy button."", ""instructions"": [{""parameters"": {""fillWith"": [""TenantId""], ""label"": ""Tenant ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""**STEP 3 - Enable the Threat Intelligence Upload Indicators API (Preview) data connector in Microsoft Sentinel**\n\n1. Deploy the **Threat Intelligence (New) Solution**, which includes the **Threat Intelligence Upload Indicators API (Preview)**\n2. Browse to the Content Hub, find and select the **Threat Intelligence (NEW)** solution.\n3. Select the **Install/Update** button.""}, {""title"": """", ""description"": ""**STEP 4 - Deploy the Azure Function**\n\n**IMPORTANT:** Before deploying the Lumen Defender Threat Feed connector, have the Tenant ID, Workspace ID, App Insights Workspace Resource ID, Azure Entra application details (Client ID, Client Secret), and Lumen API key readily available.\n\n1. 
Click the Deploy to Azure button.\n\n[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FLumen%20Defender%20Threat%20Feed%2FData%2520Connectors%2FLumenThreatFeed%2Fazuredeploy_Connector_LumenThreatFeed_AzureFunction.json)\n\n2. Fill in the appropriate values for each parameter:\n\n- Subscription: Confirm the correct subscription is selected or use the dropdown to change your selection\n- Resource Group: Select the resource group to be used by the Function App and related resources\n- Function Name: Enter a globally unique name with an 11-character limit. Adhere to your organization\u2019s naming convention and ensure the name is globally unique since it is used (along with the uniqueString() function) to identify the ARM template being deployed.\n- Workspace ID: Found in the \""Overview\"" tab for the Log Analytics Workspace of the Microsoft Sentinel instance and provided for convenience on the connector information page.\n- Lumen API Key: Obtain an API key through Lumen support\n- Lumen Base URL: Filled in automatically and should generally not be changed. This URL contains API endpoints used by the connector\n- Tenant ID: Obtained from the Entra App Registration overview page for the registered application (listed as Directory ID) and can also be obtained from the Tenant Information page in Azure\n- Client ID: Obtained from the Entra App Registration overview page for the registered application (listed as Application ID)\n- Client Secret: Obtained when the secret is created during the app registration process. It can only be viewed when first created and is hidden permanently afterwards. Rerun the app registration process to obtain a new Client Secret if necessary.\n- App Insights Workspace Resource ID: Obtained from the overview page of the Log Analytics Workspace for your Microsoft Sentinel instance. 
Click on the \""JSON View\"" link in the top right and the Resource ID will be displayed at the top with a copy button.\n- Blob Container Name: Use the default name unless otherwise required. Azure Blob Storage is used for temporary storage and processing of threat indicators.""}, {""title"": """", ""description"": ""**STEP 5 - Verify Deployment**\n\n1. The connector polls for indicator updates every 15 minutes.\n2. Monitor the Function App logs in the Azure Portal to verify successful execution\n3. After the app performs its first run, review the indicators ingested by either viewing the \u201cLumen Defender Threat Feed Overview\u201d workbook or viewing the \u201cThreat Intelligence\u201d section in Microsoft Sentinel. In Microsoft Sentinel \u201cThreat Intelligence\u201d, filter for source \u201cLumen\u201d to display only Lumen generated indicators.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and write permissions on the Log Analytics workspace are required."", ""providerDisplayName"": ""Log Analytics Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": false}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Azure Entra App Registration"", ""description"": ""An Azure Entra application registration with the Microsoft Sentinel Contributor role assigned is required for STIX Objects API access. 
[See the documentation to learn more about Azure Entra applications](https://docs.microsoft.com/azure/active-directory/develop/quickstart-register-app).""}, {""name"": ""Microsoft Sentinel Contributor Role"", ""description"": ""Microsoft Sentinel Contributor role is required for the Azure Entra application to upload threat intelligence indicators.""}, {""name"": ""Lumen Defender Threat Feed API Key"", ""description"": ""A Lumen Defender Threat Feed API Key is required for accessing threat intelligence data. [Contact Lumen for API access](mailto:DefenderThreatFeedSales@Lumen.com?subject=API%20Access%20Request).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Lumen%20Defender%20Threat%20Feed/Data%20Connectors/LumenThreatFeed/LumenThreatFeedConnector_ConnectorUI.json","true" +"ThreatIntelligenceIndicator","MISP2Sentinel","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MISP2Sentinel","microsoftsentinelcommunity","azure-sentinel-solution-misp2sentinel","2023-07-29","2023-07-29","","Community","Community","https://github.com/cudeso/misp2sentinel","","domains,verticals","MISP2SentinelConnector","MISP project & cudeso.be","MISP2Sentinel","This solution installs the MISP2Sentinel connector that allows you to automatically push threat indicators from MISP to Microsoft Sentinel via the Upload Indicators REST API. 
After installing the solution, configure and enable this data connector by following guidance in Manage solution view.","[{""title"": ""Installation and setup instructions"", ""description"": ""Use the documentation from this GitHub repository to install and configure the MISP to Microsoft Sentinel connector: \n\nhttps://github.com/cudeso/misp2sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.SecurityInsights/threatintelligence/write"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MISP2Sentinel/Data%20Connectors/MISP2SentinelConnector_UploadIndicatorsAPI.json","true" +"MailGuard365_Threats_CL","MailGuard 365","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MailGuard%20365","mailguardptylimited","microsoft-sentinel-solution-mailguard365","2023-05-09","2023-06-08","","MailGuard 365","Partner","https://www.mailguard365.com/support/","","domains","MailGuard365","MailGuard365","MailGuard 365","MailGuard 365 Enhanced Email Security for Microsoft 365. Exclusive to the Microsoft marketplace, MailGuard 365 is integrated with Microsoft 365 security (incl. Defender) for enhanced protection against advanced email threats like phishing, ransomware and sophisticated BEC attacks.","[{""title"": ""Configure and connect MailGuard 365"", ""description"": ""1. In the MailGuard 365 Console, click **Settings** on the navigation bar.\n2. Click the **Integrations** tab.\n3. Click the **Enable Microsoft Sentinel**.\n4. Enter your workspace id and primary key from the fields below, click **Finish**.\n5. 
For additional instructions, please contact MailGuard 365 support."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MailGuard%20365/Data%20Connectors/MailGuard365.json","true" +"MailRiskEventEmails_CL","MailRisk","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MailRisk","securepracticeas1650887373770","microsoft-sentinel-solution-mailrisk","2023-03-16","2025-10-27","","Secure Practice","Partner","https://securepractice.co/support","","domains","SecurePracticeMailRiskConnector","Secure Practice","MailRisk by Secure Practice","The MailRisk by Secure Practice connector allows you to ingest email threat intelligence data from the MailRisk API into Microsoft Sentinel. This connector provides visibility into reported emails, risk assessments, and security events related to email threats.","[{""title"": ""1. 
Obtain Secure Practice API Credentials"", ""description"": ""Log in to your Secure Practice account and generate an API Key and API Secret if you haven't already.""}, {""title"": ""2. Connect to MailRisk API"", ""description"": ""Enter your Secure Practice API credentials below. The credentials will be securely stored and used to authenticate API requests."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""Enter your Secure Practice API Key"", ""type"": ""text"", ""name"": ""apiKey""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Secret"", ""placeholder"": ""Enter your Secure Practice API Secret"", ""type"": ""password"", ""name"": ""apiSecret""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""disconnectLabel"": ""Disconnect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": false, ""action"": false}}], ""customs"": [{""name"": ""API credentials"", ""description"": ""Your Secure Practice API key pair is also needed, which are created in the [settings in the admin portal](https://manage.securepractice.co/settings/security). 
Generate a new key pair with description `Microsoft Sentinel`.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MailRisk/Data%20Connectors/MailRisk_CCP/MailRisk_ConnectorDefinition.json","true" +"","Malware Protection Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Malware%20Protection%20Essentials","azuresentinel","azure-sentinel-solution-malwareprotection","2023-09-25","2023-09-25","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","MarkLogicAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MarkLogicAudit","azuresentinel","azure-sentinel-solution-marklogicaudit","2022-08-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","MaturityModelForEventLogManagementM2131","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MaturityModelForEventLogManagementM2131","azuresentinel","azure-sentinel-solution-maturitymodelforeventlogma","2021-12-05","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"Syslog","McAfee Network Security Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/McAfee%20Network%20Security%20Platform","azuresentinel","azure-sentinel-solution-mcafeensp","2021-06-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","McAfeeNSP","McAfee","[Deprecated] McAfee Network Security Platform","The [McAfee® Network Security Platform](https://www.mcafee.com/enterprise/en-us/products/network-security-platform.html) data connector provides the capability to ingest [McAfee® Network Security Platform events](https://docs.mcafee.com/bundle/network-security-platform-10.1.x-integration-guide-unmanaged/page/GUID-8C706BE9-6AC9-4641-8A53-8910B51207D8.html) into Microsoft Sentinel. 
Refer to [McAfee® Network Security Platform](https://docs.mcafee.com/bundle/network-security-platform-10.1.x-integration-guide-unmanaged/page/GUID-F7D281EC-1CC9-4962-A7A3-5A9D9584670E.html) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**McAfeeNSPEvent**](https://aka.ms/sentinel-mcafeensp-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using McAfee\u00ae Network Security Platform version: 10.1.x"", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server where the McAfee\u00ae Network Security Platform logs are forwarded.\n\n> Logs from McAfee\u00ae Network Security Platform Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", 
""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure McAfee\u00ae Network Security Platform event forwarding"", ""description"": ""Follow the configuration steps below to get McAfee\u00ae Network Security Platform logs into Microsoft Sentinel.\n1. [Follow these instructions](https://docs.mcafee.com/bundle/network-security-platform-10.1.x-product-guide/page/GUID-E4A687B0-FAFB-4170-AC94-1D968A10380F.html) to forward alerts from the Manager to a syslog server.\n2. Add a syslog notification profile, [more details here](https://docs.mcafee.com/bundle/network-security-platform-10.1.x-product-guide/page/GUID-5BADD5D7-21AE-4E3B-AEE2-A079F3FD6A38.html). This is mandatory. 
While creating profile, to make sure that events are formatted correctly, enter the following text in the Message text box:\n\t\t:|SENSOR_ALERT_UUID|ALERT_TYPE|ATTACK_TIME|ATTACK_NAME|ATTACK_ID\n\t\t|ATTACK_SEVERITY|ATTACK_SIGNATURE|ATTACK_CONFIDENCE|ADMIN_DOMAIN|SENSOR_NAME|INTERFACE\n\t\t|SOURCE_IP|SOURCE_PORT|DESTINATION_IP|DESTINATION_PORT|CATEGORY|SUB_CATEGORY\n\t\t|DIRECTION|RESULT_STATUS|DETECTION_MECHANISM|APPLICATION_PROTOCOL|NETWORK_PROTOCOL|""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/McAfee%20Network%20Security%20Platform/Data%20Connectors/McAfeeNSP.json","true" +"Syslog","McAfee ePolicy Orchestrator","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/McAfee%20ePolicy%20Orchestrator","azuresentinel","azure-sentinel-solution-mcafeeepo","2021-03-25","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","McAfeeePO","McAfee","[Deprecated] McAfee ePolicy Orchestrator (ePO)","The McAfee ePolicy Orchestrator data connector provides the capability to ingest [McAfee ePO](https://www.mcafee.com/enterprise/en-us/products/epolicy-orchestrator.html) events into Microsoft Sentinel through the syslog. Refer to [documentation](https://docs.mcafee.com/bundle/epolicy-orchestrator-landing/page/GUID-0C40020F-5B7F-4549-B9CC-0E017BC8797F.html) for more information.","[{""title"": """", ""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected [**McAfeeEPOEvent**](https://aka.ms/sentinel-McAfeeePO-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n2. Select **Apply below configuration to my machines** and select the facilities and severities.\n3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. 
Configure McAfee ePolicy Orchestrator event forwarding to Syslog server"", ""description"": ""[Follow these instructions](https://kcm.trellix.com/corporate/index?page=content&id=KB87927) to register a syslog server.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/McAfee%20ePolicy%20Orchestrator/Data%20Connectors/Connector_McAfee_ePO.json","true" +"","Microsoft 365","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20365","azuresentinel","azure-sentinel-solution-office365","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"FinanceOperationsActivity_CL","Microsoft Business Applications","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Business%20Applications","sentinel4dynamics365","powerplatform","2023-04-19","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Dynamics365Finance","Microsoft","Dynamics 365 Finance and Operations","Dynamics 365 for Finance and Operations is a comprehensive Enterprise Resource Planning (ERP) solution that combines financial and operational capabilities to help businesses manage their day-to-day operations. It offers a range of features that enable businesses to streamline workflows, automate tasks, and gain insights into operational performance.

The Dynamics 365 Finance and Operations data connector ingests Dynamics 365 Finance and Operations admin activities and audit logs as well as user business process and application activities logs into Microsoft Sentinel.","[{""description"": "">Connectivity to Finance and Operations requires a Microsoft Entra app registration (client ID and secret). You'll also need the Microsoft Entra tenant ID and the Finance Operations Organization URL.""}, {""description"": ""To enable data collection, create a role in Dynamics 365 Finance and Operations with permissions to view the Database Log entity. Assign this role to a dedicated Finance and Operations user, mapped to the client ID of a Microsoft Entra app registration. Follow these steps to complete the process:""}, {""title"": ""Step 1 - Microsoft Entra app registration"", ""description"": ""1. Navigate to the [Microsoft Entra portal](https://entra.microsoft.com). \n2. Under Applications, click on **App Registrations** and create a new app registration (leave all defaults).\n3. Open the new app registration and create a new secret.\n4. Retain the **Tenant ID**, **Application (client) ID**, and **Client secret** for later use.""}, {""title"": ""Step 2 - Create a role for data collection in Finance and Operations"", ""description"": ""1. In the Finance and Operations portal, navigate to **Workspaces > System administration** and click **Security Configuration**\n2. Under **Roles** click **Create new** and give the new role a name e.g. Database Log Viewer.\n3. Select the new role in the list of roles and click **Privileges** and then **Add references**.\n4. Select **Database log Entity View** from the list of privileges.\n5. Click on **Unpublished objects** and then **Publish all** to publish the role.""}, {""title"": ""Step 3 - Create a user for data collection in Finance and Operations"", ""description"": ""1. In the Finance and Operations portal, navigate to **Modules > System administration** and click **Users**\n2. 
Create a new user and assign the role created in the previous step to the user.""}, {""title"": ""Step 4 - Register the Microsoft Entra app in Finance and Operations"", ""description"": ""1. In the F&O portal, navigate to **System administration > Setup > Microsoft Entra applications** (Azure Active Directory applications)\n2. Create a new entry in the table. In the **Client Id** field, enter the application ID of the app registered in Step 1.\n3. In the **Name** field, enter a name for the application.\n4. In the **User ID** field, select the user ID created in the previous step.""}, {""description"": ""Connect using client credentials"", ""title"": ""Connect events from Dynamics 365 Finance and Operations to Microsoft Sentinel"", ""instructions"": [{""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""label"": ""Add environment"", ""isPrimary"": true, ""title"": ""Dynamics 365 Finance and Operations connection"", ""instructionSteps"": [{""title"": ""Environment details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Microsoft Entra tenant ID."", ""placeholder"": ""Tenant ID (GUID)"", ""type"": ""text"", ""name"": ""tenantId""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""App registration client ID"", ""placeholder"": ""Finance and Operations client ID"", ""type"": ""text"", ""name"": ""clientId""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""App registration client secret"", ""placeholder"": ""Finance and Operations client secret"", ""type"": ""password"", ""name"": ""clientSecret""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Finance and Operations organization URL"", ""placeholder"": ""https://dynamics-dev.axcloud.dynamics.com"", ""type"": ""text"", ""name"": ""auditHost""}}]}]}}]}, {""title"": ""Organizations"", ""description"": ""Each row represents a Finance and Operations connection"", ""instructions"": [{""type"": ""DataConnectorsGrid"", 
""parameters"": {""mapping"": [{""columnName"": ""Environment URL"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft Entra app registration"", ""description"": ""Application client ID and secret used to access Dynamics 365 Finance and Operations.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Business%20Applications/Data%20Connectors/DynamicsFinOpsPollerConnector/DynamicsFinOps_DataConnectorDefinition.json","true" +"LLMActivity","Microsoft Copilot","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Copilot","azuresentinel","azure-sentinel-solution-microsoftcopilot","2025-10-01","","","Microsoft","Microsoft","https://support.microsoft.com","","domains","MicrosoftCopilot","Microsoft","Microsoft Copilot","The Microsoft Copilot logs connector in Microsoft Sentinel enables the seamless ingestion of Copilot-generated activity logs into Microsoft Sentinel for advanced threat detection, investigation, and response. 
It collects telemetry from Microsoft Copilot services - such as usage data, prompts and system responses - and ingests into Microsoft Sentinel, allowing security teams to monitor for misuse, detect anomalies, and maintain compliance with organizational policies.","[{""title"": ""Connect Microsoft Copilot audit logs to Microsoft Sentinel"", ""description"": ""This connector uses the Office Management API to get your Microsoft Copilot audit logs. The logs will be stored and processed in your existing Microsoft Sentinel workspace. You can find the data in the **LLMActivity** table."", ""instructions"": [{""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Tenant Permissions"", ""description"": ""'Security Administrator' or 'Global Administrator' on the workspace's tenant.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Copilot/Data%20Connectors/MicrosoftCopilot_ConnectorDefinition.json","true" +"","Microsoft Defender For Identity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20For%20Identity","azuresentinel","azure-sentinel-solution-mdefenderforidentity","2022-04-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"","Microsoft Defender Threat Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20Threat%20Intelligence","azuresentinel","azure-sentinel-solution-microsoftdefenderthreatint","2023-03-23","","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","Microsoft Defender XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20XDR","azuresentinel","azure-sentinel-solution-microsoft365defender","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"SecurityAlert","Microsoft Defender for Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Cloud","azuresentinel","azure-sentinel-solution-microsoftdefenderforcloud","2022-05-17","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftDefenderForCloudTenantBased","Microsoft","Tenant-based Microsoft Defender for Cloud","Microsoft Defender for Cloud is a security management tool that allows you to detect and quickly respond to threats across Azure, hybrid, and multi-cloud workloads. This connector allows you to stream your MDC security alerts from Microsoft 365 Defender into Microsoft Sentinel, so you can can leverage the advantages of XDR correlations connecting the dots across your cloud resources, devices and identities and view the data in workbooks, queries and investigate and respond to incidents. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2269832&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect Tenant-based Microsoft Defender for Cloud to Microsoft Sentinel"", ""description"": ""After connecting this connector, **all** your Microsoft Defender for Cloud subscriptions' alerts will be sent to this Microsoft Sentinel workspace.\n\n> Your Microsoft Defender for Cloud alerts are connected to stream through the Microsoft 365 Defender. To benefit from automated grouping of the alerts into incidents, connect the Microsoft 365 Defender incidents connector. 
Incidents can be viewed in the incidents queue."", ""instructions"": [{""parameters"": {""title"": ""Tenant-based Microsoft Defender for Cloud"", ""connectorKind"": ""MicrosoftDefenderForCloudTenantBased"", ""enable"": true, ""newPipelineEnabledFeatureFlagConfig"": {""feature"": ""MdcAlertsByMtp"", ""featureStates"": {""1"": 2, ""2"": 2, ""3"": 2, ""4"": 2, ""5"": 2}}, ""infoBoxMessage"": ""Your Microsoft Defender for Cloud alerts are connected to stream through the Microsoft 365 Defender. To benefit from automated grouping of the alerts into incidents, connect the Microsoft 365 Defender incidents connector. Incidents can be viewed in the incidents queue"", ""shouldAlwaysDisplayInfoMessage"": true}, ""type"": ""MicrosoftDefenderForCloudTenantBased""}]}]","{""tenant"": [""SecurityAdmin"", ""GlobalAdmin""], ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""tenantMember"": true}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Cloud/Data%20Connectors/MicrosoftDefenderForCloudTenantBased.json","true" +"","Microsoft Defender for Cloud Apps","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Cloud%20Apps","azuresentinel","azure-sentinel-solution-microsoftdefendercloudapps","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"SecurityAlert","Microsoft Defender for Office 365","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Office%20365","azuresentinel","azure-sentinel-solution-microsoftdefenderforo365","2022-05-17","","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com/","","domains","OfficeATP","Microsoft","Microsoft Defender for Office 365 (Preview)","Microsoft Defender for Office 365 safeguards your organization against malicious threats posed by email messages, links (URLs) and collaboration tools. By ingesting Microsoft Defender for Office 365 alerts into Microsoft Sentinel, you can incorporate information about email- and URL-based threats into your broader risk analysis and build response scenarios accordingly.

The following types of alerts will be imported:

- A potentially malicious URL click was detected
- Email messages containing malware removed after delivery
- Email messages containing phish URLs removed after delivery
- Email reported by user as malware or phish
- Suspicious email sending patterns detected
- User restricted from sending email

These alerts can be seen by Office customers in the **Office Security and Compliance Center**.

For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2219942&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect Microsoft Defender for Office 365 alerts to Microsoft Sentinel"", ""description"": ""Connecting Microsoft Defender for Office 365 will cause your data that is collected by Microsoft Defender for Office 365 service to be stored and processed in the location that you have configured your Microsoft Sentinel workspace."", ""instructions"": [{""parameters"": {""connectorKind"": ""OfficeATP"", ""title"": ""Microsoft Defender for Office 365"", ""enable"": true}, ""type"": ""SentinelResourceProvider""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""tenant"": [""GlobalAdmin"", ""SecurityAdmin""], ""licenses"": [""OfficeATP""]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Defender%20for%20Office%20365/Data%20Connectors/template_OfficeATP.json","true" +"","Microsoft Entra ID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID","azuresentinel","azure-sentinel-solution-azureactivedirectory","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"","Microsoft Entra ID Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Entra%20ID%20Protection","azuresentinel","azure-sentinel-solution-azureactivedirectoryip","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"Event","Microsoft Exchange Security - Exchange 
On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-ExchangeAdminAuditLogEvents","Microsoft","[Deprecated] Microsoft Exchange Logs and Events","Deprecated, use the 'ESI-Opt' dataconnectors. You can stream all Exchange Audit events, IIS Logs, HTTP Proxy logs and Security Event logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to view dashboards, create custom alerts, and improve investigation. This is used by Microsoft Exchange Security Workbooks to provide security insights of your On-Premises Exchange environment","[{""description"": "">**NOTE:** This solution is based on options. This allows you to choose which data will be ingest as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each options are independant for one from the other. To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions)""}, {""title"": ""1. 
Download and install the agents needed to collect logs for Microsoft Sentinel"", ""description"": ""Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Deploy Monitor Agents"", ""description"": ""This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers"", ""instructions"": [{""parameters"": {""title"": ""Select which agent you want to install in your servers to collect logs:"", ""instructionSteps"": [{""title"": ""[Prefered] Azure Monitor Agent via Azure Arc"", ""description"": ""**Deploy the Azure Arc Agent**\n> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""title"": ""Install Azure Log Analytics Agent (Deprecated on 31/08/2024)"", ""description"": ""1. Download the Azure Log Analytics Agent and choose the deployment method in the below link."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Deploy log injestion following choosed options"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""[Option 1] MS Exchange Management Log collection"", ""description"": ""Select how to stream MS Exchange Admin Audit event logs"", ""instructions"": [{""parameters"": {""title"": ""MS Exchange Admin Audit event logs"", ""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Microsoft Exchange Admin Audit Events logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCR.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption1-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCR, Type Event log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. 
In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Custom' option, enter 'MSExchange Management' as expression and Add it.\n6. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace **Legacy agents management**, select **Windows Event logs**.\n2. Click **Add Windows event log** and enter **MSExchange Management** as log name.\n3. Collect Error, Warning and Information types\n4. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 2] Security/Application/System logs of Exchange Servers"", ""description"": ""Select how to stream Security/Application/System logs of Exchange Servers"", ""instructions"": [{""parameters"": {""title"": ""Security Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - Security Event logs"", ""description"": ""**Enable data collection rule for Security Logs**\nSecurity Events logs are collected only from **Windows** agents.\n1. Add Exchange Servers on *Resources* tab.\n2. Select Security log level\n\n> **Common level** is the minimum required. 
Please select 'Common' or 'All Security Events' on DCR definition."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 0}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""title"": ""Application and System Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Application and System Events logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCR.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCR, Type Event log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Basic' option.\n6. 
For Application, select 'Critical', 'Error' and 'Warning'. For System, select Critical/Error/Warning/Information. \n7. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Windows Event logs**.\n2. Click **Add Windows event log** and search **Application** as log name.\n3. Click **Add Windows event log** and search **System** as log name.\n4. Collect Error (for all), Warning (for all) and Information (for System) types\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 3 and 4] Security logs of Domain Controllers"", ""description"": ""Select how to stream Security logs of Domain Controllers. If you want to implement Option 3, you just need to select DC on same site as Exchange Servers. 
If you want to implement Option 4, you can select all DCs of your forest."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""[Option 3] List only Domain Controllers on the same site as Exchange Servers for next step"", ""description"": ""**This limits the quantity of data injested but some incident can't be detected.**""}, {""title"": ""[Option 4] List all Domain Controllers of your Active-Directory Forest for next step"", ""description"": ""**This allows collecting all security events**""}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""title"": ""Security Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - Security Event logs"", ""description"": ""**Enable data collection rule for Security Logs**\nSecurity Events logs are collected only from **Windows** agents.\n1. Add chosen DCs on *Resources* tab.\n2. Select Security log level\n\n> **Common level** is the minimum required. Please select 'Common' or 'All Security Events' on DCR definition."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 0}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 5] IIS logs of Exchange Servers"", ""description"": ""Select how to stream IIS logs of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> IIS logs are collected only from **Windows** agents."", ""instructions"": [{""type"": ""AdminAuditEvents""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. 
Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption5-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create DCR, Type IIS log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. 
In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. Select the created DCE. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'IIS logs' (Do not enter a path if IIS Logs path is configured by default). Click on 'Add data source'\n6. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **IIS Logs**.\n2. Check **Collect W3C format IIS log files**\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 6] Message Tracking of Exchange Servers"", ""description"": ""Select how to stream Message Tracking of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Message Tracking are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""text"": ""**Attention**, Custom logs in Monitor Agent is in Preview. 
The deployment doesn't work as expected for the moment (March 2023)."", ""inline"": false}, ""type"": ""InfoMessage""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule and Custom Table"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption6-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. 
In the **Basics** tab, fill the required fields and give a name to the DCE, like ESI-ExchangeServers. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create Custom DCR Table"", ""description"": ""1. Download the Example file from [Microsoft Sentinel GitHub](https://aka.ms/Sentinel-Sample-ESI-MessageTrackingExampleFile).\n2. From the Azure Portal, navigate to [Workspace Analytics](https://portal.azure.com/#view/HubsExtension/BrowseResource/resourceType/Microsoft.OperationalInsights%2Fworkspaces) and select your target Workspace.\n3. Click in 'Tables', click **+ Create** at the top and select **New Custom log (DCR-Based)**.\n4. In the **Basics** tab, enter **MessageTrackingLog** on the Table name, create a Data Collection rule with the name **DCR-Option6-MessageTrackingLogs** (for example) and select the previously created Data collection Endpoint.\n5. In the **Schema and Transformation** tab, choose the downloaded sample file and click on **Transformation Editor**.\n6. 
In the transformation field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(['date-time'])\n| extend\n clientHostname = ['client-hostname'],\n clientIP = ['client-ip'],\n connectorId = ['connector-id'],\n customData = ['custom-data'],\n eventId = ['event-id'],\n internalMessageId = ['internal-message-id'],\n logId = ['log-id'],\n messageId = ['message-id'],\n messageInfo = ['message-info'],\n messageSubject = ['message-subject'],\n networkMessageId = ['network-message-id'],\n originalClientIp = ['original-client-ip'],\n originalServerIp = ['original-server-ip'],\n recipientAddress= ['recipient-address'],\n recipientCount= ['recipient-count'],\n recipientStatus= ['recipient-status'],\n relatedRecipientAddress= ['related-recipient-address'],\n returnPath= ['return-path'],\n senderAddress= ['sender-address'],\n senderHostname= ['server-hostname'],\n serverIp= ['server-ip'],\n sourceContext= ['source-context'],\n schemaVersion=['schema-version'],\n messageTrackingTenantId = ['tenant-id'],\n totalBytes = ['total-bytes'],\n transportTrafficType = ['transport-traffic-type']\n| project-away\n ['client-ip'],\n ['client-hostname'],\n ['connector-id'],\n ['custom-data'],\n ['date-time'],\n ['event-id'],\n ['internal-message-id'],\n ['log-id'],\n ['message-id'],\n ['message-info'],\n ['message-subject'],\n ['network-message-id'],\n ['original-client-ip'],\n ['original-server-ip'],\n ['recipient-address'],\n ['recipient-count'],\n ['recipient-status'],\n ['related-recipient-address'],\n ['return-path'],\n ['sender-address'],\n ['server-hostname'],\n ['server-ip'],\n ['source-context'],\n ['schema-version'],\n ['tenant-id'],\n ['total-bytes'],\n ['transport-traffic-type']*\n\n8. Click 'Run' and after 'Apply'.\n9. Click **Next**, then click **Create**.""}, {""title"": ""C. Modify the created DCR, Type Custom log"", ""description"": ""1. 
From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Select the previously created DCR, like **DCR-Option6-MessageTrackingLogs**.\n3. In the **Resources** tab, enter you Exchange Servers.\n4. In **Data Sources**, add a Data Source type 'Custom Text logs' and enter 'C:\\Program Files\\Microsoft\\Exchange Server\\V15\\TransportRoles\\Logs\\MessageTracking\\*.log' in file pattern, 'MessageTrackingLog_CL' in Table Name.\n6.in Transform field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(['date-time'])\n| extend\n clientHostname = ['client-hostname'],\n clientIP = ['client-ip'],\n connectorId = ['connector-id'],\n customData = ['custom-data'],\n eventId = ['event-id'],\n internalMessageId = ['internal-message-id'],\n logId = ['log-id'],\n messageId = ['message-id'],\n messageInfo = ['message-info'],\n messageSubject = ['message-subject'],\n networkMessageId = ['network-message-id'],\n originalClientIp = ['original-client-ip'],\n originalServerIp = ['original-server-ip'],\n recipientAddress= ['recipient-address'],\n recipientCount= ['recipient-count'],\n recipientStatus= ['recipient-status'],\n relatedRecipientAddress= ['related-recipient-address'],\n returnPath= ['return-path'],\n senderAddress= ['sender-address'],\n senderHostname= ['server-hostname'],\n serverIp= ['server-ip'],\n sourceContext= ['source-context'],\n schemaVersion=['schema-version'],\n messageTrackingTenantId = ['tenant-id'],\n totalBytes = ['total-bytes'],\n transportTrafficType = ['transport-traffic-type']\n| project-away\n ['client-ip'],\n ['client-hostname'],\n ['connector-id'],\n ['custom-data'],\n ['date-time'],\n ['event-id'],\n ['internal-message-id'],\n ['log-id'],\n ['message-id'],\n ['message-info'],\n ['message-subject'],\n ['network-message-id'],\n ['original-client-ip'],\n ['original-server-ip'],\n ['recipient-address'],\n 
['recipient-count'],\n ['recipient-status'],\n ['related-recipient-address'],\n ['return-path'],\n ['sender-address'],\n ['server-hostname'],\n ['server-ip'],\n ['source-context'],\n ['schema-version'],\n ['tenant-id'],\n ['total-bytes'],\n ['transport-traffic-type']* \n7. Click on 'Add data source'.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\n1. Under workspace **Settings** part, select **Tables**, click **+ Create** and click on **New custom log (MMA-Based)**.\n2. Select Sample file **[MessageTracking Sample](https://aka.ms/Sentinel-Sample-ESI-MessageTrackingLogsSampleCSV)** and click Next\n3. Select type **Windows** and enter the path **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\TransportRoles\\Logs\\MessageTracking\\*.log**. Click Next.\n4. Enter **MessageTrackingLog** as Table name and click Next.\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 7] HTTP Proxy of Exchange Servers"", ""description"": ""Select how to stream HTTP Proxy of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Message Tracking are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""text"": ""**Attention**, Custom logs in Monitor Agent is in Preview. 
The deployment doesn't work as expected for the moment (March 2023)."", ""inline"": false}, ""type"": ""InfoMessage""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption7-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE. \n3. 
'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create Custom DCR Table"", ""description"": ""1. Download the Example file from [Microsoft Sentinel GitHub](https://aka.ms/Sentinel-Sample-ESI-HTTPProxyExampleFile).\n2. From the Azure Portal, navigate to [Workspace Analytics](https://portal.azure.com/#view/HubsExtension/BrowseResource/resourceType/Microsoft.OperationalInsights%2Fworkspaces) and select your target Workspace.\n3. Click in 'Tables', click **+ Create** at the top and select **New Custom log (DCR-Based)**.\n4. In the **Basics** tab, enter **ExchangeHttpProxy** on the Table name, create a Data Collection rule with the name **DCR-Option7-HTTPProxyLogs** (for example) and select the previously created Data collection Endpoint.\n5. In the **Schema and Transformation** tab, choose the downloaded sample file and click on **Transformation Editor**.\n6. In the transformation field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(DateTime)\n| project-away DateTime\n*\n\n8. Click 'Run' and after 'Apply'.\n9. Click **Next**, then click **Create**.""}, {""title"": ""C. Modify the created DCR, Type Custom log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Select the previously created DCR, like **DCR-Option7-HTTPProxyLogs**.\n3. In the **Resources** tab, enter you Exchange Servers.\n4. In **Data Sources**, add a Data Source type 'Custom Text logs' and enter 'C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Autodiscover\\*.log' in file pattern, 'ExchangeHttpProxy_CL' in Table Name.\n6.in Transform field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(DateTime)\n| project-away DateTime* \n7. 
Click on 'Add data source'.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\n1. Under workspace **Settings** part, select **Tables**, click **+ Create** and click on **New custom log (MMA-Based)**.\n2. Select Sample file **[MessageTracking Sample](https://aka.ms/Sentinel-Sample-ESI-HttpProxySampleCSV)** and click Next\n3. Select type **Windows** and enter all the following paths **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Autodiscover\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Eas\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Ecp\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Ews\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Mapi\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Oab\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Owa\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\OwaCalendar\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\PowerShell\\*.log** and **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\RpcHttp\\*.log** . Click Next.\n4. Enter **ExchangeHttpProxy** as Table name and click Next.\n5. 
Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. Parsers are automatically deployed with the solution. Follow the steps to create the Kusto Functions alias : [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)"", ""instructions"": [{""parameters"": {""title"": ""Parsers are automatically deployed during Solution deployment. If you want to deploy manually, follow the steps below"", ""instructionSteps"": [{""title"": ""Manual Parser Deployment"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""1. Download the Parser file"", ""description"": ""The latest version of the file [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)""}, {""title"": ""2. Create Parser **ExchangeAdminAuditLogs** function"", ""description"": ""In 'Logs' explorer of your Microsoft Sentinel's log analytics, copy the content of the file to Log explorer""}, {""title"": ""3. Save Parser **ExchangeAdminAuditLogs** function"", ""description"": ""Click on save button.\n No parameter is needed for this parser.\nClick save again.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""Azure Log Analytics will be deprecated, to collect data from non-Azure VMs, Azure Arc is recommended. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""name"": ""Detailed documentation"", ""description"": "">**NOTE:** Detailed documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-ExchangeAdminAuditLogEvents.json","true"
This allows you to choose which data will be ingest as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each options are independant for one from the other. To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions)""}, {""title"": ""1. Download and install the agents needed to collect logs for Microsoft Sentinel"", ""description"": ""Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Deploy Monitor Agents"", ""description"": ""This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers"", ""instructions"": [{""parameters"": {""title"": ""Select which agent you want to install in your servers to collect logs:"", ""instructionSteps"": [{""title"": ""[Prefered] Azure Monitor Agent via Azure Arc"", ""description"": ""**Deploy the Azure Arc Agent**\n> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""title"": ""Install Azure Log Analytics Agent (Deprecated on 31/08/2024)"", ""description"": ""1. Download the Azure Log Analytics Agent and choose the deployment method in the below link."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Deploy log ingestion following the chosen options"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""[Option 1] MS Exchange Management Log collection"", ""description"": ""Select how to stream MS Exchange Admin Audit event logs"", ""instructions"": [{""parameters"": {""title"": ""MS Exchange Admin Audit event logs"", ""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Microsoft Exchange Admin Audit Events logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCR.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption1-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCR, Type Event log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. \n4. In the **Resources** tab, enter your Exchange Servers.\n5. 
In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Custom' option, enter 'MSExchange Management' as expression and Add it.\n6. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace **Legacy agents management**, select **Windows Event logs**.\n2. Click **Add Windows event log** and enter **MSExchange Management** as log name.\n3. Collect Error, Warning and Information types\n4. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 2] Security/Application/System logs of Exchange Servers"", ""description"": ""Select how to stream Security/Application/System logs of Exchange Servers"", ""instructions"": [{""parameters"": {""title"": ""Security Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - Security Event logs"", ""description"": ""**Enable data collection rule for Security Logs**\nSecurity Events logs are collected only from **Windows** agents.\n1. Add Exchange Servers on *Resources* tab.\n2. Select Security log level\n\n> **Common level** is the minimum required. 
Please select 'Common' or 'All Security Events' on DCR definition."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 0}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""title"": ""Application and System Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Application and System Events logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCR.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCR, Type Event log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Basic' option.\n6. 
For Application, select 'Critical', 'Error' and 'Warning'. For System, select Critical/Error/Warning/Information. \n7. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Windows Event logs**.\n2. Click **Add Windows event log** and search **Application** as log name.\n3. Click **Add Windows event log** and search **System** as log name.\n4. Collect Error (for all), Warning (for all) and Information (for System) types\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 3 and 4] Security logs of Domain Controllers"", ""description"": ""Select how to stream Security logs of Domain Controllers. If you want to implement Option 3, you just need to select DC on same site as Exchange Servers. 
If you want to implement Option 4, you can select all DCs of your forest."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""[Option 3] List only Domain Controllers on the same site as Exchange Servers for next step"", ""description"": ""**This limits the quantity of data ingested but some incidents can't be detected.**""}, {""title"": ""[Option 4] List all Domain Controllers of your Active-Directory Forest for next step"", ""description"": ""**This allows collecting all security events**""}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""title"": ""Security Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - Security Event logs"", ""description"": ""**Enable data collection rule for Security Logs**\nSecurity Events logs are collected only from **Windows** agents.\n1. Add chosen DCs on *Resources* tab.\n2. Select Security log level\n\n> **Common level** is the minimum required. Please select 'Common' or 'All Security Events' on DCR definition."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 0}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 5] IIS logs of Exchange Servers"", ""description"": ""Select how to stream IIS logs of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> IIS logs are collected only from **Windows** agents."", ""instructions"": [{""type"": ""AdminAuditEvents""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. 
Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption5-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create DCR, Type IIS log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. 
In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. Select the created DCE. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'IIS logs' (Do not enter a path if IIS Logs path is configured by default). Click on 'Add data source'\n6. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **IIS Logs**.\n2. Check **Collect W3C format IIS log files**\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 6] Message Tracking of Exchange Servers"", ""description"": ""Select how to stream Message Tracking of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Message Tracking are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""text"": ""**Attention**, Custom logs in Monitor Agent is in Preview. 
The deployment doesn't work as expected for the moment (March 2023)."", ""inline"": false}, ""type"": ""InfoMessage""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule and Custom Table"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption6-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. 
In the **Basics** tab, fill the required fields and give a name to the DCE, like ESI-ExchangeServers. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create Custom DCR Table"", ""description"": ""1. Download the Example file from [Microsoft Sentinel GitHub](https://aka.ms/Sentinel-Sample-ESI-MessageTrackingExampleFile).\n2. From the Azure Portal, navigate to [Workspace Analytics](https://portal.azure.com/#view/HubsExtension/BrowseResource/resourceType/Microsoft.OperationalInsights%2Fworkspaces) and select your target Workspace.\n3. Click in 'Tables', click **+ Create** at the top and select **New Custom log (DCR-Based)**.\n4. In the **Basics** tab, enter **MessageTrackingLog** on the Table name, create a Data Collection rule with the name **DCR-Option6-MessageTrackingLogs** (for example) and select the previously created Data collection Endpoint.\n5. In the **Schema and Transformation** tab, choose the downloaded sample file and click on **Transformation Editor**.\n6. 
In the transformation field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(['date-time'])\n| extend\n clientHostname = ['client-hostname'],\n clientIP = ['client-ip'],\n connectorId = ['connector-id'],\n customData = ['custom-data'],\n eventId = ['event-id'],\n internalMessageId = ['internal-message-id'],\n logId = ['log-id'],\n messageId = ['message-id'],\n messageInfo = ['message-info'],\n messageSubject = ['message-subject'],\n networkMessageId = ['network-message-id'],\n originalClientIp = ['original-client-ip'],\n originalServerIp = ['original-server-ip'],\n recipientAddress= ['recipient-address'],\n recipientCount= ['recipient-count'],\n recipientStatus= ['recipient-status'],\n relatedRecipientAddress= ['related-recipient-address'],\n returnPath= ['return-path'],\n senderAddress= ['sender-address'],\n senderHostname= ['server-hostname'],\n serverIp= ['server-ip'],\n sourceContext= ['source-context'],\n schemaVersion=['schema-version'],\n messageTrackingTenantId = ['tenant-id'],\n totalBytes = ['total-bytes'],\n transportTrafficType = ['transport-traffic-type']\n| project-away\n ['client-ip'],\n ['client-hostname'],\n ['connector-id'],\n ['custom-data'],\n ['date-time'],\n ['event-id'],\n ['internal-message-id'],\n ['log-id'],\n ['message-id'],\n ['message-info'],\n ['message-subject'],\n ['network-message-id'],\n ['original-client-ip'],\n ['original-server-ip'],\n ['recipient-address'],\n ['recipient-count'],\n ['recipient-status'],\n ['related-recipient-address'],\n ['return-path'],\n ['sender-address'],\n ['server-hostname'],\n ['server-ip'],\n ['source-context'],\n ['schema-version'],\n ['tenant-id'],\n ['total-bytes'],\n ['transport-traffic-type']*\n\n8. Click 'Run' and after 'Apply'.\n9. Click **Next**, then click **Create**.""}, {""title"": ""C. Modify the created DCR, Type Custom log"", ""description"": ""1. 
From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Select the previously created DCR, like **DCR-Option6-MessageTrackingLogs**.\n3. In the **Resources** tab, enter you Exchange Servers.\n4. In **Data Sources**, add a Data Source type 'Custom Text logs' and enter 'C:\\Program Files\\Microsoft\\Exchange Server\\V15\\TransportRoles\\Logs\\MessageTracking\\*.log' in file pattern, 'MessageTrackingLog_CL' in Table Name.\n6.in Transform field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(['date-time'])\n| extend\n clientHostname = ['client-hostname'],\n clientIP = ['client-ip'],\n connectorId = ['connector-id'],\n customData = ['custom-data'],\n eventId = ['event-id'],\n internalMessageId = ['internal-message-id'],\n logId = ['log-id'],\n messageId = ['message-id'],\n messageInfo = ['message-info'],\n messageSubject = ['message-subject'],\n networkMessageId = ['network-message-id'],\n originalClientIp = ['original-client-ip'],\n originalServerIp = ['original-server-ip'],\n recipientAddress= ['recipient-address'],\n recipientCount= ['recipient-count'],\n recipientStatus= ['recipient-status'],\n relatedRecipientAddress= ['related-recipient-address'],\n returnPath= ['return-path'],\n senderAddress= ['sender-address'],\n senderHostname= ['server-hostname'],\n serverIp= ['server-ip'],\n sourceContext= ['source-context'],\n schemaVersion=['schema-version'],\n messageTrackingTenantId = ['tenant-id'],\n totalBytes = ['total-bytes'],\n transportTrafficType = ['transport-traffic-type']\n| project-away\n ['client-ip'],\n ['client-hostname'],\n ['connector-id'],\n ['custom-data'],\n ['date-time'],\n ['event-id'],\n ['internal-message-id'],\n ['log-id'],\n ['message-id'],\n ['message-info'],\n ['message-subject'],\n ['network-message-id'],\n ['original-client-ip'],\n ['original-server-ip'],\n ['recipient-address'],\n 
['recipient-count'],\n ['recipient-status'],\n ['related-recipient-address'],\n ['return-path'],\n ['sender-address'],\n ['server-hostname'],\n ['server-ip'],\n ['source-context'],\n ['schema-version'],\n ['tenant-id'],\n ['total-bytes'],\n ['transport-traffic-type']* \n7. Click on 'Add data source'.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\n1. Under workspace **Settings** part, select **Tables**, click **+ Create** and click on **New custom log (MMA-Based)**.\n2. Select Sample file **[MessageTracking Sample](https://aka.ms/Sentinel-Sample-ESI-MessageTrackingLogsSampleCSV)** and click Next\n3. Select type **Windows** and enter the path **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\TransportRoles\\Logs\\MessageTracking\\*.log**. Click Next.\n4. Enter **MessageTrackingLog** as Table name and click Next.\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 7] HTTP Proxy of Exchange Servers"", ""description"": ""Select how to stream HTTP Proxy of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Message Tracking are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""text"": ""**Attention**, Custom logs in Monitor Agent is in Preview. 
The deployment doesn't work as expected for the moment (March 2023)."", ""inline"": false}, ""type"": ""InfoMessage""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption7-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE. \n3. 
'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create Custom DCR Table"", ""description"": ""1. Download the Example file from [Microsoft Sentinel GitHub](https://aka.ms/Sentinel-Sample-ESI-HTTPProxyExampleFile).\n2. From the Azure Portal, navigate to [Workspace Analytics](https://portal.azure.com/#view/HubsExtension/BrowseResource/resourceType/Microsoft.OperationalInsights%2Fworkspaces) and select your target Workspace.\n3. Click in 'Tables', click **+ Create** at the top and select **New Custom log (DCR-Based)**.\n4. In the **Basics** tab, enter **ExchangeHttpProxy** on the Table name, create a Data Collection rule with the name **DCR-Option7-HTTPProxyLogs** (for example) and select the previously created Data collection Endpoint.\n5. In the **Schema and Transformation** tab, choose the downloaded sample file and click on **Transformation Editor**.\n6. In the transformation field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(DateTime)\n| project-away DateTime\n*\n\n8. Click 'Run' and after 'Apply'.\n9. Click **Next**, then click **Create**.""}, {""title"": ""C. Modify the created DCR, Type Custom log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Select the previously created DCR, like **DCR-Option7-HTTPProxyLogs**.\n3. In the **Resources** tab, enter you Exchange Servers.\n4. In **Data Sources**, add a Data Source type 'Custom Text logs' and enter 'C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Autodiscover\\*.log' in file pattern, 'ExchangeHttpProxy_CL' in Table Name.\n6.in Transform field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(DateTime)\n| project-away DateTime* \n7. 
Click on 'Add data source'.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\n1. Under workspace **Settings** part, select **Tables**, click **+ Create** and click on **New custom log (MMA-Based)**.\n2. Select Sample file **[MessageTracking Sample](https://aka.ms/Sentinel-Sample-ESI-HttpProxySampleCSV)** and click Next\n3. Select type **Windows** and enter all the following paths **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Autodiscover\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Eas\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Ecp\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Ews\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Mapi\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Oab\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Owa\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\OwaCalendar\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\PowerShell\\*.log** and **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\RpcHttp\\*.log** . Click Next.\n4. Enter **ExchangeHttpProxy** as Table name and click Next.\n5. 
Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. Parsers are automatically deployed with the solution. Follow the steps to create the Kusto Functions alias : [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)"", ""instructions"": [{""parameters"": {""title"": ""Parsers are automatically deployed during Solution deployment. If you want to deploy manually, follow the steps below"", ""instructionSteps"": [{""title"": ""Manual Parser Deployment"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""1. Download the Parser file"", ""description"": ""The latest version of the file [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)""}, {""title"": ""2. Create Parser **ExchangeAdminAuditLogs** function"", ""description"": ""In 'Logs' explorer of your Microsoft Sentinel's log analytics, copy the content of the file to Log explorer""}, {""title"": ""3. Save Parser **ExchangeAdminAuditLogs** function"", ""description"": ""Click on save button.\n No parameter is needed for this parser.\nClick save again.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""Azure Log Analytics will be deprecated, to collect data from non-Azure VMs, Azure Arc is recommended. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""name"": ""Detailed documentation"", ""description"": "">**NOTE:** Detailed documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-ExchangeAdminAuditLogEvents.json","true" +"MessageTrackingLog_CL","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-ExchangeAdminAuditLogEvents","Microsoft","[Deprecated] Microsoft Exchange Logs and Events","Deprecated, use the 'ESI-Opt' dataconnectors. You can stream all Exchange Audit events, IIS Logs, HTTP Proxy logs and Security Event logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to view dashboards, create custom alerts, and improve investigation. This is used by Microsoft Exchange Security Workbooks to provide security insights of your On-Premises Exchange environment","[{""description"": "">**NOTE:** This solution is based on options. 
This allows you to choose which data will be ingested as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each option is independent from the others. To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions)""}, {""title"": ""1. Download and install the agents needed to collect logs for Microsoft Sentinel"", ""description"": ""Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Deploy Monitor Agents"", ""description"": ""This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers"", ""instructions"": [{""parameters"": {""title"": ""Select which agent you want to install in your servers to collect logs:"", ""instructionSteps"": [{""title"": ""[Preferred] Azure Monitor Agent via Azure Arc"", ""description"": ""**Deploy the Azure Arc Agent**\n> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""title"": ""Install Azure Log Analytics Agent (Deprecated on 31/08/2024)"", ""description"": ""1. Download the Azure Log Analytics Agent and choose the deployment method in the below link."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Deploy log ingestion following chosen options"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""[Option 1] MS Exchange Management Log collection"", ""description"": ""Select how to stream MS Exchange Admin Audit event logs"", ""instructions"": [{""parameters"": {""title"": ""MS Exchange Admin Audit event logs"", ""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Microsoft Exchange Admin Audit Events logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCR.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption1-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCR, Type Event log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. \n4. In the **Resources** tab, enter your Exchange Servers.\n5. 
In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Custom' option, enter 'MSExchange Management' as expression and Add it.\n6. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace **Legacy agents management**, select **Windows Event logs**.\n2. Click **Add Windows event log** and enter **MSExchange Management** as log name.\n3. Collect Error, Warning and Information types\n4. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 2] Security/Application/System logs of Exchange Servers"", ""description"": ""Select how to stream Security/Application/System logs of Exchange Servers"", ""instructions"": [{""parameters"": {""title"": ""Security Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - Security Event logs"", ""description"": ""**Enable data collection rule for Security Logs**\nSecurity Events logs are collected only from **Windows** agents.\n1. Add Exchange Servers on *Resources* tab.\n2. Select Security log level\n\n> **Common level** is the minimum required. 
Please select 'Common' or 'All Security Events' on DCR definition."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 0}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""title"": ""Application and System Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Application and System Events logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCR.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCR, Type Event log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Basic' option.\n6. 
For Application, select 'Critical', 'Error' and 'Warning'. For System, select Critical/Error/Warning/Information. \n7. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Windows Event logs**.\n2. Click **Add Windows event log** and search **Application** as log name.\n3. Click **Add Windows event log** and search **System** as log name.\n4. Collect Error (for all), Warning (for all) and Information (for System) types\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 3 and 4] Security logs of Domain Controllers"", ""description"": ""Select how to stream Security logs of Domain Controllers. If you want to implement Option 3, you just need to select DC on same site as Exchange Servers. 
If you want to implement Option 4, you can select all DCs of your forest."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""[Option 3] List only Domain Controllers on the same site as Exchange Servers for next step"", ""description"": ""**This limits the quantity of data injested but some incident can't be detected.**""}, {""title"": ""[Option 4] List all Domain Controllers of your Active-Directory Forest for next step"", ""description"": ""**This allows collecting all security events**""}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""title"": ""Security Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - Security Event logs"", ""description"": ""**Enable data collection rule for Security Logs**\nSecurity Events logs are collected only from **Windows** agents.\n1. Add chosen DCs on *Resources* tab.\n2. Select Security log level\n\n> **Common level** is the minimum required. Please select 'Common' or 'All Security Events' on DCR definition."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 0}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 5] IIS logs of Exchange Servers"", ""description"": ""Select how to stream IIS logs of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> IIS logs are collected only from **Windows** agents."", ""instructions"": [{""type"": ""AdminAuditEvents""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. 
Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption5-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create DCR, Type IIS log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. 
In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. Select the created DCE. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'IIS logs' (Do not enter a path if IIS Logs path is configured by default). Click on 'Add data source'\n6. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **IIS Logs**.\n2. Check **Collect W3C format IIS log files**\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 6] Message Tracking of Exchange Servers"", ""description"": ""Select how to stream Message Tracking of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Message Tracking are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""text"": ""**Attention**, Custom logs in Monitor Agent is in Preview. 
The deployment doesn't work as expected for the moment (March 2023)."", ""inline"": false}, ""type"": ""InfoMessage""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule and Custom Table"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption6-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. 
In the **Basics** tab, fill the required fields and give a name to the DCE, like ESI-ExchangeServers. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create Custom DCR Table"", ""description"": ""1. Download the Example file from [Microsoft Sentinel GitHub](https://aka.ms/Sentinel-Sample-ESI-MessageTrackingExampleFile).\n2. From the Azure Portal, navigate to [Workspace Analytics](https://portal.azure.com/#view/HubsExtension/BrowseResource/resourceType/Microsoft.OperationalInsights%2Fworkspaces) and select your target Workspace.\n3. Click in 'Tables', click **+ Create** at the top and select **New Custom log (DCR-Based)**.\n4. In the **Basics** tab, enter **MessageTrackingLog** on the Table name, create a Data Collection rule with the name **DCR-Option6-MessageTrackingLogs** (for example) and select the previously created Data collection Endpoint.\n5. In the **Schema and Transformation** tab, choose the downloaded sample file and click on **Transformation Editor**.\n6. 
In the transformation field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(['date-time'])\n| extend\n clientHostname = ['client-hostname'],\n clientIP = ['client-ip'],\n connectorId = ['connector-id'],\n customData = ['custom-data'],\n eventId = ['event-id'],\n internalMessageId = ['internal-message-id'],\n logId = ['log-id'],\n messageId = ['message-id'],\n messageInfo = ['message-info'],\n messageSubject = ['message-subject'],\n networkMessageId = ['network-message-id'],\n originalClientIp = ['original-client-ip'],\n originalServerIp = ['original-server-ip'],\n recipientAddress= ['recipient-address'],\n recipientCount= ['recipient-count'],\n recipientStatus= ['recipient-status'],\n relatedRecipientAddress= ['related-recipient-address'],\n returnPath= ['return-path'],\n senderAddress= ['sender-address'],\n senderHostname= ['server-hostname'],\n serverIp= ['server-ip'],\n sourceContext= ['source-context'],\n schemaVersion=['schema-version'],\n messageTrackingTenantId = ['tenant-id'],\n totalBytes = ['total-bytes'],\n transportTrafficType = ['transport-traffic-type']\n| project-away\n ['client-ip'],\n ['client-hostname'],\n ['connector-id'],\n ['custom-data'],\n ['date-time'],\n ['event-id'],\n ['internal-message-id'],\n ['log-id'],\n ['message-id'],\n ['message-info'],\n ['message-subject'],\n ['network-message-id'],\n ['original-client-ip'],\n ['original-server-ip'],\n ['recipient-address'],\n ['recipient-count'],\n ['recipient-status'],\n ['related-recipient-address'],\n ['return-path'],\n ['sender-address'],\n ['server-hostname'],\n ['server-ip'],\n ['source-context'],\n ['schema-version'],\n ['tenant-id'],\n ['total-bytes'],\n ['transport-traffic-type']*\n\n8. Click 'Run' and after 'Apply'.\n9. Click **Next**, then click **Create**.""}, {""title"": ""C. Modify the created DCR, Type Custom log"", ""description"": ""1. 
From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Select the previously created DCR, like **DCR-Option6-MessageTrackingLogs**.\n3. In the **Resources** tab, enter you Exchange Servers.\n4. In **Data Sources**, add a Data Source type 'Custom Text logs' and enter 'C:\\Program Files\\Microsoft\\Exchange Server\\V15\\TransportRoles\\Logs\\MessageTracking\\*.log' in file pattern, 'MessageTrackingLog_CL' in Table Name.\n6.in Transform field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(['date-time'])\n| extend\n clientHostname = ['client-hostname'],\n clientIP = ['client-ip'],\n connectorId = ['connector-id'],\n customData = ['custom-data'],\n eventId = ['event-id'],\n internalMessageId = ['internal-message-id'],\n logId = ['log-id'],\n messageId = ['message-id'],\n messageInfo = ['message-info'],\n messageSubject = ['message-subject'],\n networkMessageId = ['network-message-id'],\n originalClientIp = ['original-client-ip'],\n originalServerIp = ['original-server-ip'],\n recipientAddress= ['recipient-address'],\n recipientCount= ['recipient-count'],\n recipientStatus= ['recipient-status'],\n relatedRecipientAddress= ['related-recipient-address'],\n returnPath= ['return-path'],\n senderAddress= ['sender-address'],\n senderHostname= ['server-hostname'],\n serverIp= ['server-ip'],\n sourceContext= ['source-context'],\n schemaVersion=['schema-version'],\n messageTrackingTenantId = ['tenant-id'],\n totalBytes = ['total-bytes'],\n transportTrafficType = ['transport-traffic-type']\n| project-away\n ['client-ip'],\n ['client-hostname'],\n ['connector-id'],\n ['custom-data'],\n ['date-time'],\n ['event-id'],\n ['internal-message-id'],\n ['log-id'],\n ['message-id'],\n ['message-info'],\n ['message-subject'],\n ['network-message-id'],\n ['original-client-ip'],\n ['original-server-ip'],\n ['recipient-address'],\n 
['recipient-count'],\n ['recipient-status'],\n ['related-recipient-address'],\n ['return-path'],\n ['sender-address'],\n ['server-hostname'],\n ['server-ip'],\n ['source-context'],\n ['schema-version'],\n ['tenant-id'],\n ['total-bytes'],\n ['transport-traffic-type']* \n7. Click on 'Add data source'.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\n1. Under workspace **Settings** part, select **Tables**, click **+ Create** and click on **New custom log (MMA-Based)**.\n2. Select Sample file **[MessageTracking Sample](https://aka.ms/Sentinel-Sample-ESI-MessageTrackingLogsSampleCSV)** and click Next\n3. Select type **Windows** and enter the path **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\TransportRoles\\Logs\\MessageTracking\\*.log**. Click Next.\n4. Enter **MessageTrackingLog** as Table name and click Next.\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 7] HTTP Proxy of Exchange Servers"", ""description"": ""Select how to stream HTTP Proxy of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Message Tracking are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""text"": ""**Attention**, Custom logs in Monitor Agent is in Preview. 
The deployment doesn't work as expected for the moment (March 2023)."", ""inline"": false}, ""type"": ""InfoMessage""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption7-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE. \n3. 
'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create Custom DCR Table"", ""description"": ""1. Download the Example file from [Microsoft Sentinel GitHub](https://aka.ms/Sentinel-Sample-ESI-HTTPProxyExampleFile).\n2. From the Azure Portal, navigate to [Workspace Analytics](https://portal.azure.com/#view/HubsExtension/BrowseResource/resourceType/Microsoft.OperationalInsights%2Fworkspaces) and select your target Workspace.\n3. Click in 'Tables', click **+ Create** at the top and select **New Custom log (DCR-Based)**.\n4. In the **Basics** tab, enter **ExchangeHttpProxy** on the Table name, create a Data Collection rule with the name **DCR-Option7-HTTPProxyLogs** (for example) and select the previously created Data collection Endpoint.\n5. In the **Schema and Transformation** tab, choose the downloaded sample file and click on **Transformation Editor**.\n6. In the transformation field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(DateTime)\n| project-away DateTime\n*\n\n8. Click 'Run' and after 'Apply'.\n9. Click **Next**, then click **Create**.""}, {""title"": ""C. Modify the created DCR, Type Custom log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Select the previously created DCR, like **DCR-Option7-HTTPProxyLogs**.\n3. In the **Resources** tab, enter you Exchange Servers.\n4. In **Data Sources**, add a Data Source type 'Custom Text logs' and enter 'C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Autodiscover\\*.log' in file pattern, 'ExchangeHttpProxy_CL' in Table Name.\n6.in Transform field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(DateTime)\n| project-away DateTime* \n7. 
Click on 'Add data source'.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\n1. Under workspace **Settings** part, select **Tables**, click **+ Create** and click on **New custom log (MMA-Based)**.\n2. Select Sample file **[MessageTracking Sample](https://aka.ms/Sentinel-Sample-ESI-HttpProxySampleCSV)** and click Next\n3. Select type **Windows** and enter all the following paths **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Autodiscover\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Eas\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Ecp\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Ews\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Mapi\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Oab\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Owa\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\OwaCalendar\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\PowerShell\\*.log** and **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\RpcHttp\\*.log** . Click Next.\n4. Enter **ExchangeHttpProxy** as Table name and click Next.\n5. 
Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. Parsers are automatically deployed with the solution. Follow the steps to create the Kusto Functions alias : [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)"", ""instructions"": [{""parameters"": {""title"": ""Parsers are automatically deployed during Solution deployment. If you want to deploy manually, follow the steps below"", ""instructionSteps"": [{""title"": ""Manual Parser Deployment"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""1. Download the Parser file"", ""description"": ""The latest version of the file [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)""}, {""title"": ""2. Create Parser **ExchangeAdminAuditLogs** function"", ""description"": ""In 'Logs' explorer of your Microsoft Sentinel's log analytics, copy the content of the file to Log explorer""}, {""title"": ""3. Save Parser **ExchangeAdminAuditLogs** function"", ""description"": ""Click on save button.\n No parameter is needed for this parser.\nClick save again.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""Azure Log Analytics will be deprecated, to collect data from non-Azure VMs, Azure Arc is recommended. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""name"": ""Detailled documentation"", ""description"": "">**NOTE:** Detailled documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-ExchangeAdminAuditLogEvents.json","true" +"SecurityEvent","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-ExchangeAdminAuditLogEvents","Microsoft","[Deprecated] Microsoft Exchange Logs and Events","Deprecated, use the 'ESI-Opt' dataconnectors. You can stream all Exchange Audit events, IIS Logs, HTTP Proxy logs and Security Event logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to view dashboards, create custom alerts, and improve investigation. This is used by Microsoft Exchange Security Workbooks to provide security insights of your On-Premises Exchange environment","[{""description"": "">**NOTE:** This solution is based on options. 
This allows you to choose which data will be ingest as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each options are independant for one from the other. To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions)""}, {""title"": ""1. Download and install the agents needed to collect logs for Microsoft Sentinel"", ""description"": ""Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Deploy Monitor Agents"", ""description"": ""This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers"", ""instructions"": [{""parameters"": {""title"": ""Select which agent you want to install in your servers to collect logs:"", ""instructionSteps"": [{""title"": ""[Prefered] Azure Monitor Agent via Azure Arc"", ""description"": ""**Deploy the Azure Arc Agent**\n> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""title"": ""Install Azure Log Analytics Agent (Deprecated on 31/08/2024)"", ""description"": ""1. Download the Azure Log Analytics Agent and choose the deployment method in the below link."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Deploy log injestion following choosed options"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""[Option 1] MS Exchange Management Log collection"", ""description"": ""Select how to stream MS Exchange Admin Audit event logs"", ""instructions"": [{""parameters"": {""title"": ""MS Exchange Admin Audit event logs"", ""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Microsoft Exchange Admin Audit Events logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCR.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption1-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCR, Type Event log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. 
In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Custom' option, enter 'MSExchange Management' as expression and Add it.\n6. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace **Legacy agents management**, select **Windows Event logs**.\n2. Click **Add Windows event log** and enter **MSExchange Management** as log name.\n3. Collect Error, Warning and Information types\n4. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 2] Security/Application/System logs of Exchange Servers"", ""description"": ""Select how to stream Security/Application/System logs of Exchange Servers"", ""instructions"": [{""parameters"": {""title"": ""Security Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - Security Event logs"", ""description"": ""**Enable data collection rule for Security Logs**\nSecurity Events logs are collected only from **Windows** agents.\n1. Add Exchange Servers on *Resources* tab.\n2. Select Security log level\n\n> **Common level** is the minimum required. 
Please select 'Common' or 'All Security Events' on DCR definition."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 0}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""title"": ""Application and System Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Application and System Events logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCR.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCR, Type Event log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Basic' option.\n6. 
For Application, select 'Critical', 'Error' and 'Warning'. For System, select Critical/Error/Warning/Information. \n7. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Windows Event logs**.\n2. Click **Add Windows event log** and search **Application** as log name.\n3. Click **Add Windows event log** and search **System** as log name.\n4. Collect Error (for all), Warning (for all) and Information (for System) types\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 3 and 4] Security logs of Domain Controllers"", ""description"": ""Select how to stream Security logs of Domain Controllers. If you want to implement Option 3, you just need to select DC on same site as Exchange Servers. 
If you want to implement Option 4, you can select all DCs of your forest."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""[Option 3] List only Domain Controllers on the same site as Exchange Servers for next step"", ""description"": ""**This limits the quantity of data injested but some incident can't be detected.**""}, {""title"": ""[Option 4] List all Domain Controllers of your Active-Directory Forest for next step"", ""description"": ""**This allows collecting all security events**""}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""title"": ""Security Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - Security Event logs"", ""description"": ""**Enable data collection rule for Security Logs**\nSecurity Events logs are collected only from **Windows** agents.\n1. Add chosen DCs on *Resources* tab.\n2. Select Security log level\n\n> **Common level** is the minimum required. Please select 'Common' or 'All Security Events' on DCR definition."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 0}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 5] IIS logs of Exchange Servers"", ""description"": ""Select how to stream IIS logs of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> IIS logs are collected only from **Windows** agents."", ""instructions"": [{""type"": ""AdminAuditEvents""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. 
Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption5-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create DCR, Type IIS log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. 
In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. Select the created DCE. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'IIS logs' (Do not enter a path if IIS Logs path is configured by default). Click on 'Add data source'\n6. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **IIS Logs**.\n2. Check **Collect W3C format IIS log files**\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 6] Message Tracking of Exchange Servers"", ""description"": ""Select how to stream Message Tracking of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Message Tracking are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""text"": ""**Attention**, Custom logs in Monitor Agent is in Preview. 
The deployment doesn't work as expected for the moment (March 2023)."", ""inline"": false}, ""type"": ""InfoMessage""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule and Custom Table"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption6-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. 
In the **Basics** tab, fill the required fields and give a name to the DCE, like ESI-ExchangeServers. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create Custom DCR Table"", ""description"": ""1. Download the Example file from [Microsoft Sentinel GitHub](https://aka.ms/Sentinel-Sample-ESI-MessageTrackingExampleFile).\n2. From the Azure Portal, navigate to [Workspace Analytics](https://portal.azure.com/#view/HubsExtension/BrowseResource/resourceType/Microsoft.OperationalInsights%2Fworkspaces) and select your target Workspace.\n3. Click in 'Tables', click **+ Create** at the top and select **New Custom log (DCR-Based)**.\n4. In the **Basics** tab, enter **MessageTrackingLog** on the Table name, create a Data Collection rule with the name **DCR-Option6-MessageTrackingLogs** (for example) and select the previously created Data collection Endpoint.\n5. In the **Schema and Transformation** tab, choose the downloaded sample file and click on **Transformation Editor**.\n6. 
In the transformation field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(['date-time'])\n| extend\n clientHostname = ['client-hostname'],\n clientIP = ['client-ip'],\n connectorId = ['connector-id'],\n customData = ['custom-data'],\n eventId = ['event-id'],\n internalMessageId = ['internal-message-id'],\n logId = ['log-id'],\n messageId = ['message-id'],\n messageInfo = ['message-info'],\n messageSubject = ['message-subject'],\n networkMessageId = ['network-message-id'],\n originalClientIp = ['original-client-ip'],\n originalServerIp = ['original-server-ip'],\n recipientAddress= ['recipient-address'],\n recipientCount= ['recipient-count'],\n recipientStatus= ['recipient-status'],\n relatedRecipientAddress= ['related-recipient-address'],\n returnPath= ['return-path'],\n senderAddress= ['sender-address'],\n senderHostname= ['server-hostname'],\n serverIp= ['server-ip'],\n sourceContext= ['source-context'],\n schemaVersion=['schema-version'],\n messageTrackingTenantId = ['tenant-id'],\n totalBytes = ['total-bytes'],\n transportTrafficType = ['transport-traffic-type']\n| project-away\n ['client-ip'],\n ['client-hostname'],\n ['connector-id'],\n ['custom-data'],\n ['date-time'],\n ['event-id'],\n ['internal-message-id'],\n ['log-id'],\n ['message-id'],\n ['message-info'],\n ['message-subject'],\n ['network-message-id'],\n ['original-client-ip'],\n ['original-server-ip'],\n ['recipient-address'],\n ['recipient-count'],\n ['recipient-status'],\n ['related-recipient-address'],\n ['return-path'],\n ['sender-address'],\n ['server-hostname'],\n ['server-ip'],\n ['source-context'],\n ['schema-version'],\n ['tenant-id'],\n ['total-bytes'],\n ['transport-traffic-type']*\n\n8. Click 'Run' and after 'Apply'.\n9. Click **Next**, then click **Create**.""}, {""title"": ""C. Modify the created DCR, Type Custom log"", ""description"": ""1. 
From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Select the previously created DCR, like **DCR-Option6-MessageTrackingLogs**.\n3. In the **Resources** tab, enter you Exchange Servers.\n4. In **Data Sources**, add a Data Source type 'Custom Text logs' and enter 'C:\\Program Files\\Microsoft\\Exchange Server\\V15\\TransportRoles\\Logs\\MessageTracking\\*.log' in file pattern, 'MessageTrackingLog_CL' in Table Name.\n6.in Transform field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(['date-time'])\n| extend\n clientHostname = ['client-hostname'],\n clientIP = ['client-ip'],\n connectorId = ['connector-id'],\n customData = ['custom-data'],\n eventId = ['event-id'],\n internalMessageId = ['internal-message-id'],\n logId = ['log-id'],\n messageId = ['message-id'],\n messageInfo = ['message-info'],\n messageSubject = ['message-subject'],\n networkMessageId = ['network-message-id'],\n originalClientIp = ['original-client-ip'],\n originalServerIp = ['original-server-ip'],\n recipientAddress= ['recipient-address'],\n recipientCount= ['recipient-count'],\n recipientStatus= ['recipient-status'],\n relatedRecipientAddress= ['related-recipient-address'],\n returnPath= ['return-path'],\n senderAddress= ['sender-address'],\n senderHostname= ['server-hostname'],\n serverIp= ['server-ip'],\n sourceContext= ['source-context'],\n schemaVersion=['schema-version'],\n messageTrackingTenantId = ['tenant-id'],\n totalBytes = ['total-bytes'],\n transportTrafficType = ['transport-traffic-type']\n| project-away\n ['client-ip'],\n ['client-hostname'],\n ['connector-id'],\n ['custom-data'],\n ['date-time'],\n ['event-id'],\n ['internal-message-id'],\n ['log-id'],\n ['message-id'],\n ['message-info'],\n ['message-subject'],\n ['network-message-id'],\n ['original-client-ip'],\n ['original-server-ip'],\n ['recipient-address'],\n 
['recipient-count'],\n ['recipient-status'],\n ['related-recipient-address'],\n ['return-path'],\n ['sender-address'],\n ['server-hostname'],\n ['server-ip'],\n ['source-context'],\n ['schema-version'],\n ['tenant-id'],\n ['total-bytes'],\n ['transport-traffic-type']* \n7. Click on 'Add data source'.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\n1. Under workspace **Settings** part, select **Tables**, click **+ Create** and click on **New custom log (MMA-Based)**.\n2. Select Sample file **[MessageTracking Sample](https://aka.ms/Sentinel-Sample-ESI-MessageTrackingLogsSampleCSV)** and click Next\n3. Select type **Windows** and enter the path **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\TransportRoles\\Logs\\MessageTracking\\*.log**. Click Next.\n4. Enter **MessageTrackingLog** as Table name and click Next.\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 7] HTTP Proxy of Exchange Servers"", ""description"": ""Select how to stream HTTP Proxy of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Message Tracking are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""text"": ""**Attention**, Custom logs in Monitor Agent is in Preview. 
The deployment doesn't work as expected for the moment (March 2023)."", ""inline"": false}, ""type"": ""InfoMessage""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption7-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE. \n3. 
'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create Custom DCR Table"", ""description"": ""1. Download the Example file from [Microsoft Sentinel GitHub](https://aka.ms/Sentinel-Sample-ESI-HTTPProxyExampleFile).\n2. From the Azure Portal, navigate to [Workspace Analytics](https://portal.azure.com/#view/HubsExtension/BrowseResource/resourceType/Microsoft.OperationalInsights%2Fworkspaces) and select your target Workspace.\n3. Click in 'Tables', click **+ Create** at the top and select **New Custom log (DCR-Based)**.\n4. In the **Basics** tab, enter **ExchangeHttpProxy** on the Table name, create a Data Collection rule with the name **DCR-Option7-HTTPProxyLogs** (for example) and select the previously created Data collection Endpoint.\n5. In the **Schema and Transformation** tab, choose the downloaded sample file and click on **Transformation Editor**.\n6. In the transformation field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(DateTime)\n| project-away DateTime\n*\n\n8. Click 'Run' and after 'Apply'.\n9. Click **Next**, then click **Create**.""}, {""title"": ""C. Modify the created DCR, Type Custom log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Select the previously created DCR, like **DCR-Option7-HTTPProxyLogs**.\n3. In the **Resources** tab, enter you Exchange Servers.\n4. In **Data Sources**, add a Data Source type 'Custom Text logs' and enter 'C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Autodiscover\\*.log' in file pattern, 'ExchangeHttpProxy_CL' in Table Name.\n6.in Transform field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(DateTime)\n| project-away DateTime* \n7. 
Click on 'Add data source'.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\n1. Under workspace **Settings** part, select **Tables**, click **+ Create** and click on **New custom log (MMA-Based)**.\n2. Select Sample file **[MessageTracking Sample](https://aka.ms/Sentinel-Sample-ESI-HttpProxySampleCSV)** and click Next\n3. Select type **Windows** and enter all the following paths **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Autodiscover\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Eas\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Ecp\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Ews\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Mapi\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Oab\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Owa\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\OwaCalendar\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\PowerShell\\*.log** and **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\RpcHttp\\*.log** . Click Next.\n4. Enter **ExchangeHttpProxy** as Table name and click Next.\n5. 
Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. Parsers are automatically deployed with the solution. Follow the steps to create the Kusto Functions alias : [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)"", ""instructions"": [{""parameters"": {""title"": ""Parsers are automatically deployed during Solution deployment. If you want to deploy manually, follow the steps below"", ""instructionSteps"": [{""title"": ""Manual Parser Deployment"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""1. Download the Parser file"", ""description"": ""The latest version of the file [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)""}, {""title"": ""2. Create Parser **ExchangeAdminAuditLogs** function"", ""description"": ""In 'Logs' explorer of your Microsoft Sentinel's log analytics, copy the content of the file to Log explorer""}, {""title"": ""3. Save Parser **ExchangeAdminAuditLogs** function"", ""description"": ""Click on save button.\n No parameter is needed for this parser.\nClick save again.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""Azure Log Analytics will be deprecated, to collect data from non-Azure VMs, Azure Arc is recommended. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""name"": ""Detailed documentation"", ""description"": "">**NOTE:** Detailed documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-ExchangeAdminAuditLogEvents.json","true"
This allows you to choose which data will be ingest as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each options are independant for one from the other. To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions)""}, {""title"": ""1. Download and install the agents needed to collect logs for Microsoft Sentinel"", ""description"": ""Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Deploy Monitor Agents"", ""description"": ""This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers"", ""instructions"": [{""parameters"": {""title"": ""Select which agent you want to install in your servers to collect logs:"", ""instructionSteps"": [{""title"": ""[Prefered] Azure Monitor Agent via Azure Arc"", ""description"": ""**Deploy the Azure Arc Agent**\n> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""title"": ""Install Azure Log Analytics Agent (Deprecated on 31/08/2024)"", ""description"": ""1. Download the Azure Log Analytics Agent and choose the deployment method in the below link."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Deploy log injestion following choosed options"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""[Option 1] MS Exchange Management Log collection"", ""description"": ""Select how to stream MS Exchange Admin Audit event logs"", ""instructions"": [{""parameters"": {""title"": ""MS Exchange Admin Audit event logs"", ""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Microsoft Exchange Admin Audit Events logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCR.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption1-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCR, Type Event log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. 
In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Custom' option, enter 'MSExchange Management' as expression and Add it.\n6. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace **Legacy agents management**, select **Windows Event logs**.\n2. Click **Add Windows event log** and enter **MSExchange Management** as log name.\n3. Collect Error, Warning and Information types\n4. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 2] Security/Application/System logs of Exchange Servers"", ""description"": ""Select how to stream Security/Application/System logs of Exchange Servers"", ""instructions"": [{""parameters"": {""title"": ""Security Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - Security Event logs"", ""description"": ""**Enable data collection rule for Security Logs**\nSecurity Events logs are collected only from **Windows** agents.\n1. Add Exchange Servers on *Resources* tab.\n2. Select Security log level\n\n> **Common level** is the minimum required. 
Please select 'Common' or 'All Security Events' on DCR definition."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 0}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""title"": ""Application and System Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Application and System Events logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCR.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCR, Type Event log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Basic' option.\n6. 
For Application, select 'Critical', 'Error' and 'Warning'. For System, select Critical/Error/Warning/Information. \n7. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Windows Event logs**.\n2. Click **Add Windows event log** and search **Application** as log name.\n3. Click **Add Windows event log** and search **System** as log name.\n4. Collect Error (for all), Warning (for all) and Information (for System) types\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 3 and 4] Security logs of Domain Controllers"", ""description"": ""Select how to stream Security logs of Domain Controllers. If you want to implement Option 3, you just need to select DC on same site as Exchange Servers. 
If you want to implement Option 4, you can select all DCs of your forest."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""[Option 3] List only Domain Controllers on the same site as Exchange Servers for next step"", ""description"": ""**This limits the quantity of data injested but some incident can't be detected.**""}, {""title"": ""[Option 4] List all Domain Controllers of your Active-Directory Forest for next step"", ""description"": ""**This allows collecting all security events**""}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""title"": ""Security Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - Security Event logs"", ""description"": ""**Enable data collection rule for Security Logs**\nSecurity Events logs are collected only from **Windows** agents.\n1. Add chosen DCs on *Resources* tab.\n2. Select Security log level\n\n> **Common level** is the minimum required. Please select 'Common' or 'All Security Events' on DCR definition."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 0}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 5] IIS logs of Exchange Servers"", ""description"": ""Select how to stream IIS logs of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> IIS logs are collected only from **Windows** agents."", ""instructions"": [{""type"": ""AdminAuditEvents""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. 
Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption5-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create DCR, Type IIS log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. 
In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. Select the created DCE. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'IIS logs' (Do not enter a path if IIS Logs path is configured by default). Click on 'Add data source'\n6. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\nConfigure the Events you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **IIS Logs**.\n2. Check **Collect W3C format IIS log files**\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 6] Message Tracking of Exchange Servers"", ""description"": ""Select how to stream Message Tracking of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Message Tracking are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""text"": ""**Attention**, Custom logs in Monitor Agent is in Preview. 
The deployment doesn't work as expected for the moment (March 2023)."", ""inline"": false}, ""type"": ""InfoMessage""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule and Custom Table"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption6-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. 
In the **Basics** tab, fill the required fields and give a name to the DCE, like ESI-ExchangeServers. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create Custom DCR Table"", ""description"": ""1. Download the Example file from [Microsoft Sentinel GitHub](https://aka.ms/Sentinel-Sample-ESI-MessageTrackingExampleFile).\n2. From the Azure Portal, navigate to [Workspace Analytics](https://portal.azure.com/#view/HubsExtension/BrowseResource/resourceType/Microsoft.OperationalInsights%2Fworkspaces) and select your target Workspace.\n3. Click in 'Tables', click **+ Create** at the top and select **New Custom log (DCR-Based)**.\n4. In the **Basics** tab, enter **MessageTrackingLog** on the Table name, create a Data Collection rule with the name **DCR-Option6-MessageTrackingLogs** (for example) and select the previously created Data collection Endpoint.\n5. In the **Schema and Transformation** tab, choose the downloaded sample file and click on **Transformation Editor**.\n6. 
In the transformation field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(['date-time'])\n| extend\n clientHostname = ['client-hostname'],\n clientIP = ['client-ip'],\n connectorId = ['connector-id'],\n customData = ['custom-data'],\n eventId = ['event-id'],\n internalMessageId = ['internal-message-id'],\n logId = ['log-id'],\n messageId = ['message-id'],\n messageInfo = ['message-info'],\n messageSubject = ['message-subject'],\n networkMessageId = ['network-message-id'],\n originalClientIp = ['original-client-ip'],\n originalServerIp = ['original-server-ip'],\n recipientAddress= ['recipient-address'],\n recipientCount= ['recipient-count'],\n recipientStatus= ['recipient-status'],\n relatedRecipientAddress= ['related-recipient-address'],\n returnPath= ['return-path'],\n senderAddress= ['sender-address'],\n senderHostname= ['server-hostname'],\n serverIp= ['server-ip'],\n sourceContext= ['source-context'],\n schemaVersion=['schema-version'],\n messageTrackingTenantId = ['tenant-id'],\n totalBytes = ['total-bytes'],\n transportTrafficType = ['transport-traffic-type']\n| project-away\n ['client-ip'],\n ['client-hostname'],\n ['connector-id'],\n ['custom-data'],\n ['date-time'],\n ['event-id'],\n ['internal-message-id'],\n ['log-id'],\n ['message-id'],\n ['message-info'],\n ['message-subject'],\n ['network-message-id'],\n ['original-client-ip'],\n ['original-server-ip'],\n ['recipient-address'],\n ['recipient-count'],\n ['recipient-status'],\n ['related-recipient-address'],\n ['return-path'],\n ['sender-address'],\n ['server-hostname'],\n ['server-ip'],\n ['source-context'],\n ['schema-version'],\n ['tenant-id'],\n ['total-bytes'],\n ['transport-traffic-type']*\n\n8. Click 'Run' and after 'Apply'.\n9. Click **Next**, then click **Create**.""}, {""title"": ""C. Modify the created DCR, Type Custom log"", ""description"": ""1. 
From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Select the previously created DCR, like **DCR-Option6-MessageTrackingLogs**.\n3. In the **Resources** tab, enter you Exchange Servers.\n4. In **Data Sources**, add a Data Source type 'Custom Text logs' and enter 'C:\\Program Files\\Microsoft\\Exchange Server\\V15\\TransportRoles\\Logs\\MessageTracking\\*.log' in file pattern, 'MessageTrackingLog_CL' in Table Name.\n6.in Transform field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(['date-time'])\n| extend\n clientHostname = ['client-hostname'],\n clientIP = ['client-ip'],\n connectorId = ['connector-id'],\n customData = ['custom-data'],\n eventId = ['event-id'],\n internalMessageId = ['internal-message-id'],\n logId = ['log-id'],\n messageId = ['message-id'],\n messageInfo = ['message-info'],\n messageSubject = ['message-subject'],\n networkMessageId = ['network-message-id'],\n originalClientIp = ['original-client-ip'],\n originalServerIp = ['original-server-ip'],\n recipientAddress= ['recipient-address'],\n recipientCount= ['recipient-count'],\n recipientStatus= ['recipient-status'],\n relatedRecipientAddress= ['related-recipient-address'],\n returnPath= ['return-path'],\n senderAddress= ['sender-address'],\n senderHostname= ['server-hostname'],\n serverIp= ['server-ip'],\n sourceContext= ['source-context'],\n schemaVersion=['schema-version'],\n messageTrackingTenantId = ['tenant-id'],\n totalBytes = ['total-bytes'],\n transportTrafficType = ['transport-traffic-type']\n| project-away\n ['client-ip'],\n ['client-hostname'],\n ['connector-id'],\n ['custom-data'],\n ['date-time'],\n ['event-id'],\n ['internal-message-id'],\n ['log-id'],\n ['message-id'],\n ['message-info'],\n ['message-subject'],\n ['network-message-id'],\n ['original-client-ip'],\n ['original-server-ip'],\n ['recipient-address'],\n 
['recipient-count'],\n ['recipient-status'],\n ['related-recipient-address'],\n ['return-path'],\n ['sender-address'],\n ['server-hostname'],\n ['server-ip'],\n ['source-context'],\n ['schema-version'],\n ['tenant-id'],\n ['total-bytes'],\n ['transport-traffic-type']* \n7. Click on 'Add data source'.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\n1. Under workspace **Settings** part, select **Tables**, click **+ Create** and click on **New custom log (MMA-Based)**.\n2. Select Sample file **[MessageTracking Sample](https://aka.ms/Sentinel-Sample-ESI-MessageTrackingLogsSampleCSV)** and click Next\n3. Select type **Windows** and enter the path **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\TransportRoles\\Logs\\MessageTracking\\*.log**. Click Next.\n4. Enter **MessageTrackingLog** as Table name and click Next.\n5. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""[Option 7] HTTP Proxy of Exchange Servers"", ""description"": ""Select how to stream HTTP Proxy of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Message Tracking are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""text"": ""**Attention**, Custom logs in Monitor Agent is in Preview. 
The deployment doesn't work as expected for the moment (March 2023)."", ""inline"": false}, ""type"": ""InfoMessage""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption7-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE. \n3. 
'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create Custom DCR Table"", ""description"": ""1. Download the Example file from [Microsoft Sentinel GitHub](https://aka.ms/Sentinel-Sample-ESI-HTTPProxyExampleFile).\n2. From the Azure Portal, navigate to [Workspace Analytics](https://portal.azure.com/#view/HubsExtension/BrowseResource/resourceType/Microsoft.OperationalInsights%2Fworkspaces) and select your target Workspace.\n3. Click in 'Tables', click **+ Create** at the top and select **New Custom log (DCR-Based)**.\n4. In the **Basics** tab, enter **ExchangeHttpProxy** on the Table name, create a Data Collection rule with the name **DCR-Option7-HTTPProxyLogs** (for example) and select the previously created Data collection Endpoint.\n5. In the **Schema and Transformation** tab, choose the downloaded sample file and click on **Transformation Editor**.\n6. In the transformation field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(DateTime)\n| project-away DateTime\n*\n\n8. Click 'Run' and after 'Apply'.\n9. Click **Next**, then click **Create**.""}, {""title"": ""C. Modify the created DCR, Type Custom log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Select the previously created DCR, like **DCR-Option7-HTTPProxyLogs**.\n3. In the **Resources** tab, enter you Exchange Servers.\n4. In **Data Sources**, add a Data Source type 'Custom Text logs' and enter 'C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Autodiscover\\*.log' in file pattern, 'ExchangeHttpProxy_CL' in Table Name.\n6.in Transform field, enter the following KQL request :\n*source\n| extend TimeGenerated = todatetime(DateTime)\n| project-away DateTime* \n7. 
Click on 'Add data source'.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Data Collection Rules - When the legacy Azure Log Analytics Agent is used"", ""description"": ""**Configure the logs to be collected**\n\n1. Under workspace **Settings** part, select **Tables**, click **+ Create** and click on **New custom log (MMA-Based)**.\n2. Select Sample file **[MessageTracking Sample](https://aka.ms/Sentinel-Sample-ESI-HttpProxySampleCSV)** and click Next\n3. Select type **Windows** and enter all the following paths **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Autodiscover\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Eas\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Ecp\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Ews\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Mapi\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Oab\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Owa\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\OwaCalendar\\*.log**, **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\PowerShell\\*.log** and **C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\RpcHttp\\*.log** . Click Next.\n4. Enter **ExchangeHttpProxy** as Table name and click Next.\n5. 
Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. Parsers are automatically deployed with the solution. Follow the steps to create the Kusto Functions alias : [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)"", ""instructions"": [{""parameters"": {""title"": ""Parsers are automatically deployed during Solution deployment. If you want to deploy manually, follow the steps below"", ""instructionSteps"": [{""title"": ""Manual Parser Deployment"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""1. Download the Parser file"", ""description"": ""The latest version of the file [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)""}, {""title"": ""2. Create Parser **ExchangeAdminAuditLogs** function"", ""description"": ""In 'Logs' explorer of your Microsoft Sentinel's log analytics, copy the content of the file to Log explorer""}, {""title"": ""3. Save Parser **ExchangeAdminAuditLogs** function"", ""description"": ""Click on save button.\n No parameter is needed for this parser.\nClick save again.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""Azure Log Analytics will be deprecated, to collect data from non-Azure VMs, Azure Arc is recommended. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""name"": ""Detailled documentation"", ""description"": "">**NOTE:** Detailled documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-ExchangeAdminAuditLogEvents.json","true" +"ESIExchangeConfig_CL","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-ExchangeOnPremisesCollector","Microsoft","Exchange Security Insights On-Premises Collector","Connector used to push Exchange On-Premises Security configuration for Microsoft Sentinel Analysis","[{""title"": ""1. 
Install the ESI Collector Script on a server with Exchange Admin PowerShell console"", ""description"": ""This is the script that will collect Exchange Information to push content in Microsoft Sentinel.\n "", ""instructions"": [{""parameters"": {""title"": ""Script Deployment"", ""instructionSteps"": [{""title"": ""Download the latest version of ESI Collector"", ""description"": ""The latest version can be found here : https://aka.ms/ESI-ExchangeCollector-Script. The file to download is CollectExchSecIns.zip""}, {""title"": ""Copy the script folder"", ""description"": ""Unzip the content and copy the script folder on a server where Exchange PowerShell Cmdlets are present.""}, {""title"": ""Unblock the PS1 Scripts"", ""description"": ""Click right on each PS1 Script and go to Properties tab.\n If the script is marked as blocked, unblock it. You can also use the Cmdlet 'Unblock-File *.* in the unzipped folder using PowerShell.""}, {""title"": ""Configure Network Access "", ""description"": ""Ensure that the script can contact Azure Analytics (*.ods.opinsights.azure.com).""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the ESI Collector Script"", ""description"": ""Be sure to be local administrator of the server.\nIn 'Run as Administrator' mode, launch the 'setup.ps1' script to configure the collector.\n Fill the Log Analytics (Microsoft Sentinel) Workspace information.\n Fill the Environment name or leave empty. By default, choose 'Def' as Default analysis. The other choices are for specific usage."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. 
Schedule the ESI Collector Script (If not done by the Install Script due to lack of permission or ignored during installation)"", ""description"": ""The script needs to be scheduled to send Exchange configuration to Microsoft Sentinel.\n We recommend to schedule the script once a day.\n The account used to launch the Script needs to be member of the group Organization Management""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. Parsers are automatically deployed with the solution. Follow the steps to create the Kusto Functions alias : [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)"", ""instructions"": [{""parameters"": {""title"": ""Parsers are automatically deployed during Solution deployment. If you want to deploy manually, follow the steps below"", ""instructionSteps"": [{""title"": ""Manual Parser Deployment"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""1. Download the Parser file"", ""description"": ""The latest version of the file [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)""}, {""title"": ""2. Create Parser **ExchangeAdminAuditLogs** function"", ""description"": ""In 'Logs' explorer of your Microsoft Sentinel's log analytics, copy the content of the file to Log explorer""}, {""title"": ""3. 
Save Parser **ExchangeAdminAuditLogs** function"", ""description"": ""Click on save button.\n No parameter is needed for this parser.\nClick save again.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Service Account with Organization Management role"", ""description"": ""The service Account that launch the script as scheduled task needs to be Organization Management to be able to retrieve all the needed security Information.""}, {""name"": ""Detailled documentation"", ""description"": "">**NOTE:** Detailled documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-ExchangeOnPremisesCollector.json","true" +"Event","Microsoft Exchange Security - Exchange 
On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-Opt1ExchangeAdminAuditLogsByEventLogs","Microsoft","Microsoft Exchange Admin Audit Logs by Event Logs","[Option 1] - Using Azure Monitor Agent - You can stream all Exchange Audit events from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to view dashboards, create custom alerts, and improve investigation. This is used by Microsoft Exchange Security Workbooks to provide security insights of your On-Premises Exchange environment","[{""description"": "">**NOTE:** This solution is based on options. This allows you to choose which data will be ingest as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each options are independant for one from the other. To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions)\n\n>This Data Connector is the **option 1** of the wiki.""}, {""title"": ""1. 
Download and install the agents needed to collect logs for Microsoft Sentinel"", ""description"": ""Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Deploy Monitor Agents"", ""description"": ""This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers\n**Deploy the Azure Arc Agent**\n> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. [Option 1] MS Exchange Management Log collection - MS Exchange Admin Audit event logs by Data Collection Rules"", ""description"": ""The MS Exchange Admin Audit event logs are collected using Data Collection Rules (DCR) and allow to store all Administrative Cmdlets executed in an Exchange environment."", ""instructions"": [{""parameters"": {""title"": ""DCR"", ""instructionSteps"": [{""title"": ""Data Collection Rules Deployment"", ""description"": ""**Enable data collection rule**\n> Microsoft Exchange Admin Audit Events logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template (Preferred)"", ""description"": ""Use this method for automated deployment of the DCR.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption1-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5.
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCR, Type Event log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. \n4. In the **Resources** tab, enter your Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Custom' option, enter 'MSExchange Management' as expression and Add it.\n6. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. Parsers are automatically deployed with the solution. Follow the steps to create the Kusto Functions alias : [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)"", ""instructions"": [{""parameters"": {""title"": ""Parsers are automatically deployed during Solution deployment. If you want to deploy manually, follow the steps below"", ""instructionSteps"": [{""title"": ""Manual Parser Deployment"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""1.
Download the Parser file"", ""description"": ""The latest version of the file [**ExchangeAdminAuditLogs**](https://aka.ms/sentinel-ESI-ExchangeCollector-ExchangeAdminAuditLogs-parser)""}, {""title"": ""2. Create Parser **ExchangeAdminAuditLogs** function"", ""description"": ""In 'Logs' explorer of your Microsoft Sentinel's log analytics, copy the content of the file to Log explorer""}, {""title"": ""3. Save Parser **ExchangeAdminAuditLogs** function"", ""description"": ""Click on save button.\n No parameter is needed for this parser.\nClick save again.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""Azure Log Analytics will be deprecated, to collect data from non-Azure VMs, Azure Arc is recommended. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""name"": ""Detailed documentation"", ""description"": "">**NOTE:** Detailed documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-Opt1ExchangeAdminAuditLogsByEventLogs.json","true" +"Event","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-Opt2ExchangeServersEventLogs","Microsoft","Microsoft Exchange Logs and Events","[Option 2] - Using Azure Monitor Agent - You can stream all Exchange Security & Application Event logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to create custom alerts, and improve investigation.","[{""description"": "">**NOTE:** This solution is based on options. This allows you to choose which data will be ingested as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each option is independent from the others. To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions)\n\n>This Data Connector is the **option 2** of the wiki.""}, {""title"": ""1.
Download and install the agents needed to collect logs for Microsoft Sentinel"", ""description"": ""Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Deploy Monitor Agents"", ""description"": ""This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers\n**Deploy the Azure Arc Agent**\n> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. [Option 2] Security/Application/System logs of Exchange Servers"", ""description"": ""The Security/Application/System logs of Exchange Servers are collected using Data Collection Rules (DCR)."", ""instructions"": [{""parameters"": {""title"": ""Security Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - Security Event logs"", ""description"": ""**Enable data collection rule for Security Logs**\nSecurity Events logs are collected only from **Windows** agents.\n1. Add Exchange Servers on *Resources* tab.\n2. Select Security log level\n\n> **Common level** is the minimum required. 
Please select 'Common' or 'All Security Events' on DCR definition."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 0}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""title"": ""Application and System Event log collection"", ""instructionSteps"": [{""title"": ""Enable data collection rule"", ""description"": ""> Application and System Events logs are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template (Prefered method)"", ""description"": ""Use this method for automated deployment of the DCR.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace Name** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCR, Type Event log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'Windows Event logs' and select 'Basic' option.\n6. 
For Application, select 'Critical', 'Error' and 'Warning'. For System, select Critical/Error/Warning/Information. \n7. 'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Log Analytics will be deprecated"", ""description"": ""Azure Log Analytics will be deprecated, to collect data from non-Azure VMs, Azure Arc is recommended. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""name"": ""Detailled documentation"", ""description"": "">**NOTE:** Detailled documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-Opt2ExchangeServersEventLogs.json","true" +"SecurityEvent","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-Opt34DomainControllersSecurityEventLogs","Microsoft"," Microsoft Active-Directory Domain Controllers Security Event Logs","[Option 3 & 4] - Using Azure Monitor Agent -You can stream a part or all Domain Controllers Security Event logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to create custom alerts, and improve investigation.","[{""description"": "">**NOTE:** This solution is based on options. This allows you to choose which data will be ingest as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each options are independant for one from the other. To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions)\n\n>This Data Connector is the **option 3 and 4** of the wiki.""}, {""title"": ""1. 
Download and install the agents needed to collect logs for Microsoft Sentinel"", ""description"": ""Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Deploy Monitor Agents"", ""description"": ""This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers\n**Deploy the Azure Arc Agent**\n> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Security logs of Domain Controllers"", ""description"": ""Select how to stream Security logs of Domain Controllers. If you want to implement Option 3, you just need to select DC on same site as Exchange Servers. If you want to implement Option 4, you can select all DCs of your forest."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""[Option 3] List only Domain Controllers on the same site as Exchange Servers for next step"", ""description"": ""**This limits the quantity of data ingested but some incidents can't be detected.**""}, {""title"": ""[Option 4] List all Domain Controllers of your Active-Directory Forest for next step"", ""description"": ""**This allows collecting all security events**""}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""title"": ""Security Event log collection"", ""instructionSteps"": [{""title"": ""Data Collection Rules - Security Event logs"", ""description"": ""**Enable data collection rule for Security Logs**\nSecurity Events logs are collected only from **Windows** agents.\n1. Add chosen DCs on *Resources* tab.\n2. Select Security log level\n\n> **Common level** is the minimum required.
Please select 'Common' or 'All Security Events' on DCR definition."", ""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 0}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""Azure Log Analytics will be deprecated, to collect data from non-Azure VMs, Azure Arc is recommended. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""name"": ""Detailled documentation"", ""description"": "">**NOTE:** Detailled documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-Opt34DomainControllersSecurityEventLogs.json","true" +"W3CIISLog","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-Opt5ExchangeIISLogs","Microsoft","IIS Logs of Microsoft Exchange Servers","[Option 5] - Using Azure Monitor Agent - You can stream all IIS Logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you to create custom alerts, and improve investigation.","[{""description"": "">**NOTE:** This solution is based on options. This allows you to choose which data will be ingest as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each options are independant for one from the other. To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions)\n\n>This Data Connector is the **option 5** of the wiki.""}, {""title"": ""1. 
Download and install the agents needed to collect logs for Microsoft Sentinel"", ""description"": ""Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Deploy Monitor Agents"", ""description"": ""This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers\n**Deploy the Azure Arc Agent**\n> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""[Option 5] IIS logs of Exchange Servers"", ""description"": ""Select how to stream IIS logs of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Enable data collection rule"", ""description"": ""> IIS logs are collected only from **Windows** agents."", ""instructions"": [{""type"": ""AdminAuditEvents""}, {""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template (Preferred Method)"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule"", ""description"": ""1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption5-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create DCR, Type IIS log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields, Select Windows as platform type and give a name to the DCR. Select the created DCE. \n4. In the **Resources** tab, enter you Exchange Servers.\n5. In 'Collect and deliver', add a Data Source type 'IIS logs' (Do not enter a path if IIS Logs path is configured by default). Click on 'Add data source'\n6. 
'Make other preferable configuration changes', if needed, then click **Create**.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""Azure Log Analytics will be deprecated, to collect data from non-Azure VMs, Azure Arc is recommended. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""name"": ""Detailled documentation"", ""description"": "">**NOTE:** Detailled documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-Opt5ExchangeIISLogs.json","true" +"MessageTrackingLog_CL","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-Opt6ExchangeMessageTrackingLogs","Microsoft","Microsoft Exchange Message Tracking Logs","[Option 6] - Using Azure Monitor Agent - You can stream all Exchange Message Tracking from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. Those logs can be used to track the flow of messages in your Exchange environment. This data connector is based on the option 6 of the [Microsoft Exchange Security wiki](https://aka.ms/ESI_DataConnectorOptions).","[{""description"": "">**NOTE:** This solution is based on options. This allows you to choose which data will be ingest as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each options are independant for one from the other. 
To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions)\n\n>This Data Connector is the **option 6** of the wiki.""}, {""title"": ""1. Download and install the agents needed to collect logs for Microsoft Sentinel"", ""description"": ""Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Deploy Monitor Agents"", ""description"": ""This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers\n**Deploy the Azure Arc Agent**\n> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Message Tracking of Exchange Servers"", ""description"": ""Select how to stream Message Tracking of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Message Tracking are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. 
Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule and Custom Table"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption6-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Create Custom Table - Explanation"", ""description"": ""The Custom Table can't be created using the Azure Portal. You need to use an ARM template, a PowerShell Script or another method [described here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/create-custom-table?tabs=azure-powershell-1%2Cazure-portal-2%2Cazure-portal-3#create-a-custom-table).""}, {""title"": ""Create Custom Table using an ARM Template"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-MessageTrackingCustomTable)\n2. Select the preferred **Subscription**, **Resource Group**, **Location** and **Analytic Workspace Name**. \n3. Click **Create** to deploy.""}, {""title"": ""Create Custom Table using PowerShell in Cloud Shell"", ""description"": ""1. From the Azure Portal, open a Cloud Shell.\n2. 
Copy and paste and Execute the following script in the Cloud Shell to create the table.\n\t\t$tableParams = @'\n\t\t{\n\t\t\t\""properties\"": {\n\t\t\t\t\""schema\"": {\n\t\t\t\t\t \""name\"": \""MessageTrackingLog_CL\"",\n\t\t\t\t\t \""columns\"": [\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""directionality\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""reference\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""source\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""TimeGenerated\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""datetime\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""clientHostname\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""clientIP\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""connectorId\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""customData\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""eventId\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""internalMessageId\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""logId\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""messageId\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""messageInfo\"",\n\t\t\t\t\t\t\t\t\t\""type\"": 
\""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""messageSubject\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""networkMessageId\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""originalClientIp\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""originalServerIp\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""recipientAddress\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""recipientCount\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""recipientStatus\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""relatedRecipientAddress\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""returnPath\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""senderAddress\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""senderHostname\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""serverIp\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""sourceContext\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""schemaVersion\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": 
\""messageTrackingTenantId\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""totalBytes\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""transportTrafficType\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""FilePath\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t]\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\t'@\n3. Copy, Replace, Paste and execute the following parameters with your own values:\n\t\t$SubscriptionID = 'YourGUID'\n\t\t$ResourceGroupName = 'YourResourceGroupName'\n\t\t$WorkspaceName = 'YourWorkspaceName'\n4. Execute the Following Cmdlet to create the table:\n\t\tInvoke-AzRestMethod -Path \""/subscriptions/$SubscriptionID/resourcegroups/$ResourceGroupName/providers/microsoft.operationalinsights/workspaces/$WorkspaceName/tables/MessageTrackingLog_CL?api-version=2021-12-01-preview\"" -Method PUT -payload $tableParams""}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE, like ESI-ExchangeServers. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create a DCR, Type Custom log"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click on 'Create' button.\n3. 
On 'Basics' tab, fill the Rule name like **DCR-Option6-MessageTrackingLogs**, select the 'Data Collection Endpoint' with the previously created endpoint and fill other parameters.\n4. In the **Resources** tab, add your Exchange Servers.\n5. In **Collect and Deliver**, add a Data Source type 'Custom Text logs' and enter 'C:\\Program Files\\Microsoft\\Exchange Server\\V15\\TransportRoles\\Logs\\MessageTracking\\*.log' in file pattern, 'MessageTrackingLog_CL' in Table Name.\n6.in Transform field, enter the following KQL request :\n\t\tsource | extend d = split(RawData,',') | extend TimeGenerated =todatetime(d[0]) ,clientIP =tostring(d[1]) ,clientHostname =tostring(d[2]) ,serverIp=tostring(d[3]) ,senderHostname=tostring(d[4]) ,sourceContext=tostring(d[5]) ,connectorId =tostring(d[6]) ,source=tostring(d[7]) ,eventId =tostring(d[8]) ,internalMessageId =tostring(d[9]) ,messageId =tostring(d[10]) ,networkMessageId =tostring(d[11]) ,recipientAddress=tostring(d[12]) ,recipientStatus=tostring(d[13]) ,totalBytes=tostring(d[14]) ,recipientCount=tostring(d[15]) ,relatedRecipientAddress=tostring(d[16]) ,reference=tostring(d[17]) ,messageSubject =tostring(d[18]) ,senderAddress=tostring(d[19]) ,returnPath=tostring(d[20]) ,messageInfo =tostring(d[21]) ,directionality=tostring(d[22]) ,messageTrackingTenantId =tostring(d[23]) ,originalClientIp =tostring(d[24]) ,originalServerIp =tostring(d[25]) ,customData=tostring(d[26]) ,transportTrafficType =tostring(d[27]) ,logId =tostring(d[28]) ,schemaVersion=tostring(d[29]) | project-away d,RawData\n and click on 'Destination'.\n6. In 'Destination', add a destination and select the Workspace where you have previously created the Custom Table \n7. Click on 'Add data source'.\n8. 
Fill other required parameters and tags and create the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Log Analytics will be deprecated"", ""description"": ""Azure Log Analytics will be deprecated, to collect data from non-Azure VMs, Azure Arc is recommended. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""name"": ""Detailled documentation"", ""description"": "">**NOTE:** Detailled documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-Opt6ExchangeMessageTrackingLogs.json","true" +"ExchangeHttpProxy_CL","Microsoft Exchange Security - Exchange On-Premises","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises","microsoftsentinelcommunity","azure-sentinel-solution-exchangesecurityinsights","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-Opt7ExchangeHTTPProxyLogs","Microsoft","Microsoft Exchange HTTP Proxy Logs","[Option 7] - Using Azure Monitor Agent - You can stream HTTP Proxy logs and Security Event logs from the Windows machines connected to your Microsoft Sentinel workspace using the Windows agent. This connection enables you create custom alerts, and improve investigation. [Learn more](https://aka.ms/ESI_DataConnectorOptions)","[{""description"": "">**NOTE:** This solution is based on options. This allows you to choose which data will be ingest as some options can generate a very high volume of data. Depending on what you want to collect, track in your Workbooks, Analytics Rules, Hunting capabilities you will choose the option(s) you will deploy. Each options are independant for one from the other. To learn more about each option: ['Microsoft Exchange Security' wiki](https://aka.ms/ESI_DataConnectorOptions)\n\n>This Data Connector is the **option 7** of the wiki.""}, {""title"": ""1. 
Download and install the agents needed to collect logs for Microsoft Sentinel"", ""description"": ""Type of servers (Exchange Servers, Domain Controllers linked to Exchange Servers or all Domain Controllers) depends on the option you want to deploy."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Deploy Monitor Agents"", ""description"": ""This step is required only if it's the first time you onboard your Exchange Servers/Domain Controllers\n**Deploy the Azure Arc Agent**\n> [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. [Option 7] HTTP Proxy of Exchange Servers"", ""description"": ""Select how to stream HTTP Proxy of Exchange Servers"", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Data Collection Rules - When Azure Monitor Agent is used"", ""description"": ""**Enable data collection rule**\n> Message Tracking are collected only from **Windows** agents."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template (Prefered Method)"", ""description"": ""Use this method for automated deployment of the DCE and DCR."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCEExchangeServers)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. You can change the proposed name of the DCE.\n5. Click **Create** to deploy.""}, {""title"": ""B. Deploy Data Connection Rule"", ""description"": ""1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-DCROption7-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID** 'and/or Other required fields'.\n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Option 2 - Manual Deployment of Azure Automation"", ""description"": ""Use the following step-by-step instructions to deploy manually a Data Collection Rule."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Create Custom Table - Explanation"", ""description"": ""The Custom Table can't be created using the Azure Portal. You need to use an ARM template, a PowerShell Script or another method [described here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/create-custom-table?tabs=azure-powershell-1%2Cazure-portal-2%2Cazure-portal-3#create-a-custom-table).""}, {""title"": ""Create Custom Table using an ARM Template"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-HTTPProxyCustomTable)\n2. Select the preferred **Subscription**, **Resource Group**, **Location** and **Analytic Workspace Name**. \n3. Click **Create** to deploy.""}, {""title"": ""Create Custom Table using PowerShell in Cloud Shell"", ""description"": ""1. From the Azure Portal, open a Cloud Shell.\n2. 
Copy and paste and Execute the following script in the Cloud Shell to create the table.\n\t\t$tableParams = @'\n\t\t{\n\t\t\t\""properties\"": {\n\t\t\t\t \""schema\"": {\n\t\t\t\t\t\t\""name\"": \""ExchangeHttpProxy_CL\"",\n\t\t\t\t\t\t\""columns\"": [\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""AccountForestLatencyBreakup\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ActivityContextLifeTime\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ADLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""AnchorMailbox\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""AuthenticatedUser\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""AuthenticationType\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""AuthModulePerfContext\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""BackEndCookie\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""BackEndGenericInfo\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""BackendProcessingLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""BackendReqInitLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""BackendReqStreamLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": 
\""BackendRespInitLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""BackendRespStreamLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""BackEndStatus\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""BuildVersion\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""CalculateTargetBackEndLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ClientIpAddress\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ClientReqStreamLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ClientRequestId\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ClientRespStreamLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""CoreLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""DatabaseGuid\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""EdgeTraceId\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ErrorCode\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""GenericErrors\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""GenericInfo\"",\n\t\t\t\t\t\t\t\t\t\""type\"": 
\""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""GlsLatencyBreakup\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""HandlerCompletionLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""HandlerToModuleSwitchingLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""HttpPipelineLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""HttpProxyOverhead\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""HttpStatus\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""IsAuthenticated\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""KerberosAuthHeaderLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""MajorVersion\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""Method\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""MinorVersion\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ModuleToHandlerSwitchingLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""Organization\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""PartitionEndpointLookupLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": 
\""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""Protocol\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ProtocolAction\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ProxyAction\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ProxyTime\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""RequestBytes\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""RequestHandlerLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""RequestId\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ResourceForestLatencyBreakup\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ResponseBytes\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""RevisionVersion\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""RouteRefresherLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""RoutingHint\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""RoutingLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""RoutingStatus\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": 
\""RoutingType\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ServerHostName\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ServerLocatorHost\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""ServerLocatorLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""SharedCacheLatencyBreakup\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""TargetOutstandingRequests\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""TargetServer\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""TargetServerVersion\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""TotalAccountForestLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""TotalGlsLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""TotalRequestTime\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""TotalResourceForestLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""TotalSharedCacheLatency\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""UrlHost\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": 
\""UrlQuery\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""UrlStem\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""UserADObjectGuid\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""UserAgent\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""TimeGenerated\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""datetime\""\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\""name\"": \""FilePath\"",\n\t\t\t\t\t\t\t\t\t\""type\"": \""string\""\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t]\n\t\t\t\t }\n\t\t\t }\n\t\t }\n\t\t '@\n3. Copy, Replace, Paste and execute the following parameters with your own values:\n\t\t$SubscriptionID = 'YourGUID'\n\t\t$ResourceGroupName = 'YourResourceGroupName'\n\t\t$WorkspaceName = 'YourWorkspaceName'\n4. Execute the Following Cmdlet to create the table:\n\t\tInvoke-AzRestMethod -Path \""/subscriptions/$SubscriptionID/resourcegroups/$ResourceGroupName/providers/microsoft.operationalinsights/workspaces/$WorkspaceName/tables/ExchangeHttpProxy_CL?api-version=2021-12-01-preview\"" -Method PUT -payload $tableParams""}]}, ""type"": ""InstructionStepsGroup""}, {""parameters"": {""instructionSteps"": [{""title"": ""A. Create DCE (If not already created for Exchange Servers)"", ""description"": ""1. From the Azure Portal, navigate to [Azure Data collection Endpoint](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionEndpoints).\n2. Click **+ Create** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the DCE. \n3. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Create a DCR, Type Custom log"", ""description"": ""1. 
From the Azure Portal, navigate to [Azure Data collection rules](https://portal.azure.com/#view/Microsoft_Azure_Monitoring/AzureMonitoringBrowseBlade/~/dataCollectionRules).\n2. Click on 'Create' button.\n3. On 'Basics' tab, fill the Rule name like **DCR-Option7-HTTPProxyLogs**, select the 'Data Collection Endpoint' with the previously created endpoint and fill other parameters.\n4. In the **Resources** tab, add your Exchange Servers.\n5. In **Collect and Deliver**, add a Data Source type 'Custom Text logs' and enter the following file pattern : \n\t\t'C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Autodiscover\\*.log','C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Eas\\*.log','C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Ecp\\*.log','C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Ews\\*.log','C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Mapi\\*.log','C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Oab\\*.log','C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\Owa\\*.log','C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\OwaCalendar\\*.log','C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\PowerShell\\*.log','C:\\Program Files\\Microsoft\\Exchange Server\\V15\\Logging\\HttpProxy\\RpcHttp\\*.log'\n6. Put 'ExchangeHttpProxy_CL' in Table Name.\n7. 
in Transform field, enter the following KQL request :\n\t\tsource | extend d = split(RawData,',') | extend DateTime=todatetime(d[0]),RequestId=tostring(d[1]) ,MajorVersion=tostring(d[2]) ,MinorVersion=tostring(d[3]) ,BuildVersion=tostring(d[4]) ,RevisionVersion=tostring(d[5]) ,ClientRequestId=tostring(d[6]) ,Protocol=tostring(d[7]) ,UrlHost=tostring(d[8]) ,UrlStem=tostring(d[9]) ,ProtocolAction=tostring(d[10]) ,AuthenticationType=tostring(d[11]) ,IsAuthenticated=tostring(d[12]) ,AuthenticatedUser=tostring(d[13]) ,Organization=tostring(d[14]) ,AnchorMailbox=tostring(d[15]) ,UserAgent=tostring(d[16]) ,ClientIpAddress=tostring(d[17]) ,ServerHostName=tostring(d[18]) ,HttpStatus=tostring(d[19]) ,BackEndStatus=tostring(d[20]) ,ErrorCode=tostring(d[21]) ,Method=tostring(d[22]) ,ProxyAction=tostring(d[23]) ,TargetServer=tostring(d[24]) ,TargetServerVersion=tostring(d[25]) ,RoutingType=tostring(d[26]) ,RoutingHint=tostring(d[27]) ,BackEndCookie=tostring(d[28]) ,ServerLocatorHost=tostring(d[29]) ,ServerLocatorLatency=tostring(d[30]) ,RequestBytes=tostring(d[31]) ,ResponseBytes=tostring(d[32]) ,TargetOutstandingRequests=tostring(d[33]) ,AuthModulePerfContext=tostring(d[34]) ,HttpPipelineLatency=tostring(d[35]) ,CalculateTargetBackEndLatency=tostring(d[36]) ,GlsLatencyBreakup=tostring(d[37]) ,TotalGlsLatency=tostring(d[38]) ,AccountForestLatencyBreakup=tostring(d[39]) ,TotalAccountForestLatency=tostring(d[40]) ,ResourceForestLatencyBreakup=tostring(d[41]) ,TotalResourceForestLatency=tostring(d[42]) ,ADLatency=tostring(d[43]) ,SharedCacheLatencyBreakup=tostring(d[44]) ,TotalSharedCacheLatency=tostring(d[45]) ,ActivityContextLifeTime=tostring(d[46]) ,ModuleToHandlerSwitchingLatency=tostring(d[47]) ,ClientReqStreamLatency=tostring(d[48]) ,BackendReqInitLatency=tostring(d[49]) ,BackendReqStreamLatency=tostring(d[50]) ,BackendProcessingLatency=tostring(d[51]) ,BackendRespInitLatency=tostring(d[52]) ,BackendRespStreamLatency=tostring(d[53]) ,ClientRespStreamLatency=tostring(d[54]) 
,KerberosAuthHeaderLatency=tostring(d[55]) ,HandlerCompletionLatency=tostring(d[56]) ,RequestHandlerLatency=tostring(d[57]) ,HandlerToModuleSwitchingLatency=tostring(d[58]) ,ProxyTime=tostring(d[59]) ,CoreLatency=tostring(d[60]) ,RoutingLatency=tostring(d[61]) ,HttpProxyOverhead=tostring(d[62]) ,TotalRequestTime=tostring(d[63]) ,RouteRefresherLatency=tostring(d[64]) ,UrlQuery=tostring(d[65]) ,BackEndGenericInfo=tostring(d[66]) ,GenericInfo=tostring(d[67]) ,GenericErrors=tostring(d[68]) ,EdgeTraceId=tostring(d[69]) ,DatabaseGuid=tostring(d[70]) ,UserADObjectGuid=tostring(d[71]) ,PartitionEndpointLookupLatency=tostring(d[72]) ,RoutingStatus=tostring(d[73]) | extend TimeGenerated = DateTime | project-away d,RawData,DateTime | project-away d,RawData,DateTime\n and click on 'Destination'.\n8. In 'Destination', add a destination and select the Workspace where you have previously created the Custom Table \n9. Click on 'Add data source'.\n10. Fill other required parameters and tags and create the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Assign the DCR to all Exchange Servers"", ""description"": ""Add all your Exchange Servers to the DCR""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Log Analytics will be deprecated"", ""description"": ""Azure Log Analytics will be deprecated, to collect data from non-Azure VMs, Azure Arc is recommended. [Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""name"": ""Detailled documentation"", ""description"": "">**NOTE:** Detailled documentation on Installation procedure and usage can be found [here](https://aka.ms/MicrosoftExchangeSecurityGithub)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20On-Premises/Data%20Connectors/ESI-Opt7ExchangeHTTPProxyLogs.json","true" +"ESIExchangeOnlineConfig_CL","Microsoft Exchange Security - Exchange Online","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20Online","microsoftsentinelcommunity","azure-sentinel-solution-esionline","2022-12-21","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","ESI-ExchangeOnlineCollector","Microsoft","Exchange Security Insights Online Collector","Connector used to push Exchange Online Security configuration for Microsoft Sentinel Analysis","[{""description"": "">**NOTE - UPDATE**"", ""instructions"": [{""parameters"": {""text"": ""

NOTE - UPDATE:

We recommend updating the Collector to Version 7.6.0.0 or higher.
The Collector Script Update procedure could be found here : ESI Online Collector Update"", ""visible"": true, ""inline"": false}, ""type"": ""InfoMessage""}]}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. Follow the steps for each Parser to create the Kusto Functions alias : [**ExchangeConfiguration**](https://aka.ms/sentinel-ESI-ExchangeConfiguration-Online-parser) and [**ExchangeEnvironmentList**](https://aka.ms/sentinel-ESI-ExchangeEnvironmentList-Online-parser) \n\n**STEP 1 - Parsers deployment**"", ""instructions"": [{""parameters"": {""title"": ""Parser deployment (When using Microsoft Exchange Security Solution, Parsers are automatically deployed)"", ""instructionSteps"": [{""title"": ""1. Download the Parser files"", ""description"": ""The latest version of the 2 files [**ExchangeConfiguration.yaml**](https://aka.ms/sentinel-ESI-ExchangeConfiguration-Online-parser) and [**ExchangeEnvironmentList.yaml**](https://aka.ms/sentinel-ESI-ExchangeEnvironmentList-Online-parser)""}, {""title"": ""2. Create Parser **ExchangeConfiguration** function"", ""description"": ""In 'Logs' explorer of your Microsoft Sentinel's log analytics, copy the content of the file to Log explorer""}, {""title"": ""3. Save Parser **ExchangeConfiguration** function"", ""description"": ""Click on save button.\n Define the parameters as asked on the header of the parser file.\nClick save again.""}, {""title"": ""4. Reproduce the same steps for Parser **ExchangeEnvironmentList**"", ""description"": ""Reproduce the step 2 and 3 with the content of 'ExchangeEnvironmentList.yaml' file""}]}, ""type"": ""InstructionStepsGroup""}]}, {""description"": "">**NOTE:** This connector uses Azure Automation to connect to 'Exchange Online' to pull its Security analysis into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Automation pricing page](https://azure.microsoft.com/pricing/details/automation/) for details.""}, {""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Automation**\n\n>**IMPORTANT:** Before deploying the 'ESI Exchange Online Security Configuration' connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Exchange Online tenant name (contoso.onmicrosoft.com), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""**Option 1 - Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the 'ESI Exchange Online Security Configuration' connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ESI-ExchangeCollector-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **Tenant Name**, 'and/or Other required fields'. \n>4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""description"": ""**Option 2 - Manual Deployment of Azure Automation**\n\n Use the following step-by-step instructions to deploy the 'ESI Exchange Online Security Configuration' connector manually with Azure Automation."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""A. Create the Azure Automation Account"", ""description"": ""1. From the Azure Portal, navigate to [Azure Automation Account](https://portal.azure.com/#view/HubsExtension/BrowseResource/resourceType/Microsoft.Automation%2FAutomationAccounts).\n2. 
Click **+ Add** at the top.\n3. In the **Basics** tab, fill the required fields and give a name to the Azure Automation. \n4. In the **Advanced** and **Networking** and **Tags** Tabs, leave fields as default if you don't need to customize them.\n5. 'Make other preferable configuration changes', if needed, then click **Create**.""}, {""title"": ""B. Add Exchange Online Management Module, Microsoft Graph (Authentication, User and Group) Modules"", ""description"": ""1. On the Automation Account page, select **Modules**.\n2. Click on **Browse gallery** and search the **ExchangeOnlineManagement** module.\n3. Select it and click on **Select**.\n4. Choose Version **5.1** on Runtime version field and click on Import button.\nRepeat the step for the following modules : 'Microsoft.Graph.Authentication', 'Microsoft.Graph.Users' and 'Microsoft.Graph.Groups'. **Attention, you need to wait for Microsoft.Graph.Authentication installation before processing next modules**""}, {""title"": ""C. Download the Runbook Content"", ""description"": ""1. Download the latest version of ESI Collector. The latest version can be found here : https://aka.ms/ESI-ExchangeCollector-Script\n2. Unzip the file to find the JSON file and the PS1 file for next step.\n""}, {""title"": ""D. Create Runbook"", ""description"": ""1. On the Automation Account page, select the **Runbooks** button.\n2. Click on **Create a runbook** and name it like 'ESI-Collector' with a runbook type **PowerShell**, Runtime Version **5.1** and click 'Create'.\n3. Import the content of the previous step's PS1 file in the Runbook window.\n4. Click on **Publish**""}, {""title"": ""E. Create GlobalConfiguration Variable"", ""description"": ""1. On the Automation Account page, select the **Variables** button.\n2. Click on **Add a Variable** and name it exactly 'GlobalConfiguration' with a type **String**.\n2. On 'Value' field, copy the content of the previous step's JSON file.\n3. 
Inside the content, replace the values of **WorkspaceID** and **WorkspaceKey**.\n4. Click on 'Create' button.""}, {""title"": ""F. Create TenantName Variable"", ""description"": ""1. On the Automation Account page, select the **Variables** button.\n2. Click on **Add a Variable** and name it exactly 'TenantName' with a type **String**.\n3. On 'Value' field, write the tenant name of your Exchange Online.\n4. Click on 'Create' button.""}, {""title"": ""G. Create LastDateTracking Variable"", ""description"": ""1. On the Automation Account page, select the **Variables** button.\n2. Click on **Add a Variable** and name it exactly 'LastDateTracking' with a type **String**.\n3. On 'Value' field, write 'Never'.\n4. Click on 'Create' button.""}, {""title"": ""H. Create a Runbook Schedule"", ""description"": ""1. On the Automation Account page, select the **Runbook** button and click on your created runbook.\n2. Click on **Schedules** and **Add a schedule** button.\n3. Click on **Schedule**, **Add a Schedule** and name it. Select **Recurring** value with a recurrence of every 1 day, click 'Create'.\n4. Click on 'Configure parameters and run settings'. Leave all empty and click on **OK** and **OK** again.""}]}, ""type"": ""InstructionStepsGroup""}]}, {""description"": ""**STEP 3 - Assign Microsoft Graph Permission and Exchange Online Permission to Managed Identity Account** \n\nTo be able to collect Exchange Online information and to be able to retrieve User information and memberlist of admin groups, the automation account needs multiple permissions."", ""instructions"": [{""parameters"": {""title"": ""Assign Permissions by Script"", ""instructionSteps"": [{""title"": ""A. Download Permission Script"", ""description"": ""[Permission Update script](https://aka.ms/ESI-ExchangeCollector-Permissions)""}, {""title"": ""B. Retrieve the Azure Automation Managed Identity GUID and insert it in the downloaded script"", ""description"": ""1. 
Go to your Automation Account, in the **Identity** Section. You can find the Guid of your Managed Identity.\n2. Replace the GUID in $MI_ID = \""XXXXXXXXXXX\"" with the GUID of your Managed Identity.""}, {""title"": ""C. Launch the script with a **Global-Administrator** account"", ""description"": ""**Attention this script requires MSGraph Modules and Admin Consent to access to your tenant with Microsoft Graph**.\n\tThe script will add 3 permissions to the Managed identity:\n\t1. Exchange Online ManageAsApp permission\n\t2. User.Read.All on Microsoft Graph API\n\t3. Group.Read.All on Microsoft Graph API""}, {""title"": ""D. Exchange Online Role Assignment"", ""description"": ""1. As a **Global Administrator**, go to **Roles and Administrators**.\n2. Select **Global Reader** role or **Security Reader** and click to 'Add assignments'.\n3. Click on 'No member selected' and search your Managed Identity account Name beginning by **the name of your automation account** like 'ESI-Collector'. Select it and click on 'Select'.\n4. Click **Next** and validate the assignment by clicking **Assign**.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""microsoft.automation/automationaccounts permissions"", ""description"": ""Read and write permissions to create an Azure Automation with a Runbook is required. [See the documentation to learn more about Automation Account](https://learn.microsoft.com/en-us/azure/automation/overview).""}, {""name"": ""Microsoft.Graph permissions"", ""description"": ""Groups.Read, Users.Read and Auditing.Read permissions are required to retrieve user/group information linked to Exchange Online assignments. 
[See the documentation to learn more](https://aka.ms/sentinel-ESI-OnlineCollectorPermissions).""}, {""name"": ""Exchange Online permissions"", ""description"": ""Exchange.ManageAsApp permission and **Global Reader** or **Security Reader** Role are needed to retrieve the Exchange Online Security Configuration.[See the documentation to learn more](https://aka.ms/sentinel-ESI-OnlineCollectorPermissions).""}, {""name"": ""(Optional) Log Storage permissions"", ""description"": ""Storage Blob Data Contributor to a storage account linked to the Automation Account Managed identity or an Application ID is mandatory to store logs.[See the documentation to learn more](https://aka.ms/sentinel-ESI-OnlineCollectorPermissions).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Exchange%20Security%20-%20Exchange%20Online/Data%20Connectors/ESI-ExchangeOnlineCollector.json","true" +"PowerBIActivity","Microsoft PowerBI","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20PowerBI","azuresentinel","azure-sentinel-solution-microsoftpowerbi","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OfficePowerBI","Microsoft","Microsoft PowerBI","Microsoft PowerBI is a collection of software services, apps, and connectors that work together to turn your unrelated sources of data into coherent, visually immersive, and interactive insights. Your data may be an Excel spreadsheet, a collection of cloud-based and on-premises hybrid data warehouses, or a data store of some other type. This connector lets you stream PowerBI audit logs into Microsoft Sentinel, allowing you to track user activities in your PowerBI environment. You can filter the audit data by date range, user, dashboard, report, dataset, and activity type.","[{""title"": ""Connect Microsoft PowerBI audit logs to Microsoft Sentinel"", ""description"": ""This connector uses the Office Management API to get your PowerBI audit logs. 
The logs will be stored and processed in your existing Microsoft Sentinel workspace. You can find the data in the **PowerBIActivity** table."", ""instructions"": [{""parameters"": {""connectorKind"": ""OfficePowerBI"", ""title"": ""Microsoft PowerBI"", ""enable"": true}, ""type"": ""SentinelResourceProvider""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""tenant"": [""GlobalAdmin"", ""SecurityAdmin""], ""customs"": [{""name"": ""License"", ""description"": ""Microsoft Power BI eligible license is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20PowerBI/Data%20Connectors/template_OfficePowerBI.json","true" +"","Microsoft Project","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Project","azuresentinel","azure-sentinel-solution-microsoftproject","2022-05-23","","","Microsoft","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"PurviewDataSensitivityLogs","Microsoft Purview","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Purview","azuresentinel","azure-sentinel-solution-azurepurview","2021-11-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftAzurePurview","Microsoft","Microsoft Purview","Connect to Microsoft Purview to enable data sensitivity enrichment of Microsoft Sentinel. Data classification and sensitivity label logs from Microsoft Purview scans can be ingested and visualized through workbooks, analytical rules, and more. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2224125&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect Microsoft Purview to Microsoft Sentinel"", ""description"": ""Within the Azure Portal, navigate to your Purview resource:\n 1. In the search bar, search for **Purview accounts.**\n 2. Select the specific account that you would like to be set up with Sentinel.\n\nInside your Microsoft Purview resource:\n 3. Select **Diagnostic Settings.**\n 4. Select **+ Add diagnostic setting.**\n 5. In the **Diagnostic setting** blade:\n - Select the Log Category as **DataSensitivityLogEvent**.\n - Select **Send to Log Analytics**.\n - Chose the log destination workspace. This should be the same workspace that is used by **Microsoft Sentinel.**\n - Click **Save**."", ""instructions"": []}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Microsoft Purview account Owner or Contributor role to set up Diagnostic Settings. 
Microsoft Contributor role with write permissions to enable data connector, view workbook, and create analytic rules."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Purview/Data%20Connectors/MicrosoftPurview.json","true" +"MicrosoftPurviewInformationProtection","Microsoft Purview Information Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Purview%20Information%20Protection","azuresentinel","azure-sentinel-solution-mip","2023-01-06","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftPurviewInformationProtection","Microsoft","Microsoft Purview Information Protection","Microsoft Purview Information Protection helps you discover, classify, protect, and govern sensitive information wherever it lives or travels. Using these capabilities enable you to know your data, identify items that are sensitive and gain visibility into how they are being used to better protect your data. Sensitivity labels are the foundational capability that provide protection actions, applying encryption, access restrictions and visual markings.
Integrate Microsoft Purview Information Protection logs with Microsoft Sentinel to view dashboards, create custom alerts and improve investigation. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2223811&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Connect Microsoft Purview Information Protection audit logs to Microsoft Sentinel"", ""instructions"": [{""parameters"": {""connectorKind"": ""MicrosoftPurviewInformationProtection"", ""title"": """", ""enable"": true}, ""type"": ""SentinelResourceProvider""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""tenant"": [""GlobalAdmin"", ""SecurityAdmin""], ""customs"": [{""name"": ""License"", ""description"": ""Enterprise Mobility + Security E5/A5 or Microsoft 365 E5/A5 or P2""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Purview%20Information%20Protection/Data%20Connectors/MicrosoftPurviewInformationProtection.json","true" +"Syslog","Microsoft Sysmon For Linux","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Sysmon%20For%20Linux","azuresentinel","azure-sentinel-solution-sysmonforlinux","2021-10-27","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftSysmonForLinux","Microsoft","[Deprecated] Microsoft Sysmon For Linux","[Sysmon for Linux](https://github.com/Sysinternals/SysmonForLinux) provides detailed information about process creations, network connections and other system events.
[Sysmon for linux link:]. The Sysmon for Linux connector uses [Syslog](https://aka.ms/sysLogInfo) as its data ingestion method. This solution depends on ASIM to work as expected. [Deploy ASIM](https://aka.ms/DeployASIM) to get the full value from the solution.","[{""title"": """", ""description"": "">This data connector depends on ASIM parsers based on a Kusto Functions to work as expected. [Deploy the parsers](https://aka.ms/ASimSysmonForLinuxARM) \n\n The following functions will be deployed:\n\n - vimFileEventLinuxSysmonFileCreated, vimFileEventLinuxSysmonFileDeleted\n\n - vimProcessCreateLinuxSysmon, vimProcessTerminateLinuxSysmon\n\n - vimNetworkSessionLinuxSysmon \n\n[Read more](https://aka.ms/AboutASIM)"", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n2. 
Select **Apply below configuration to my machines** and select the facilities and severities.\n3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Sysmon%20For%20Linux/Data%20Connectors/SysmonForLinux.json","true" +"vimProcessCreateLinuxSysmon","Microsoft Sysmon For Linux","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Sysmon%20For%20Linux","azuresentinel","azure-sentinel-solution-sysmonforlinux","2021-10-27","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MicrosoftSysmonForLinux","Microsoft","[Deprecated] Microsoft Sysmon For Linux","[Sysmon for Linux](https://github.com/Sysinternals/SysmonForLinux) provides detailed information about process creations, network connections and other system events.
[Sysmon for linux link:]. The Sysmon for Linux connector uses [Syslog](https://aka.ms/sysLogInfo) as its data ingestion method. This solution depends on ASIM to work as expected. [Deploy ASIM](https://aka.ms/DeployASIM) to get the full value from the solution.","[{""title"": """", ""description"": "">This data connector depends on ASIM parsers based on a Kusto Functions to work as expected. [Deploy the parsers](https://aka.ms/ASimSysmonForLinuxARM) \n\n The following functions will be deployed:\n\n - vimFileEventLinuxSysmonFileCreated, vimFileEventLinuxSysmonFileDeleted\n\n - vimProcessCreateLinuxSysmon, vimProcessTerminateLinuxSysmon\n\n - vimNetworkSessionLinuxSysmon \n\n[Read more](https://aka.ms/AboutASIM)"", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n2. 
Select **Apply below configuration to my machines** and select the facilities and severities.\n3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Sysmon%20For%20Linux/Data%20Connectors/SysmonForLinux.json","true" +"","Microsoft Windows SQL Server Database Audit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Microsoft%20Windows%20SQL%20Server%20Database%20Audit","microsoftsentinelcommunity","azure-sentinel-solution-sqlserverdatabaseaudit","2022-11-29","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","","","","","","","","false" +"","MicrosoftDefenderForEndpoint","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MicrosoftDefenderForEndpoint","azuresentinel","azure-sentinel-solution-microsoftdefenderendpoint","2022-01-31","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","MicrosoftPurviewInsiderRiskManagement","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MicrosoftPurviewInsiderRiskManagement","azuresentinel","azure-sentinel-solution-insiderriskmanagement","2021-10-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" 
+"Awareness_Performance_Details_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastATAPI","Mimecast","Mimecast Awareness Training","The data connector for [Mimecast Awareness Training](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- Performance Details
- Safe Score Details
- User Data
- Watchlist Details
","[{""title"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""title"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. Client Secret\n5. Entra Object ID""}, {""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Mimecast Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Mimecast Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Mimecast Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 4 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available.""}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Mimecast Awareness Training Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastAT-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastAT-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the below information : \n\n\t a. Location - The location in which the data collection rules and data collection endpoints should be deployed\n\n\t b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace\n\n\t c. AzureClientID - Enter Azure Client ID that you have created during app registration\n\n\t d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret\n\n\t e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory\n\n\t f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App\n\n\t g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com)\n\n\t h. MimecastClientID - Enter Mimecast Client ID for authentication\n\n\t i. MimecastClientSecret - Enter Mimecast Client Secret for authentication\n\n\t j. MimecastAwarenessPerformanceDetailsTableName - Enter name of the table used to store Awareness Performance Details data. Default is 'Awareness_Performance_Details'\n\n\t k. MimecastAwarenessUserDataTableName - Enter name of the table used to store Awareness User Data data. 
Default is 'Awareness_User_Data'\n\n\t l. MimecastAwarenessWatchlistDetailsTableName - Enter name of the table used to store Awareness Watchlist Details data. Default is 'Awareness_Watchlist_Details'\n\n\t m. MimecastAwarenessSafeScoreDetailsTableName - Enter name of the table used to store Awareness SafeScore Details data. Default is 'Awareness_SafeScore_Details'\n\n\t n. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted\n\n\t o. Schedule - Please enter a valid Quartz cron-expression. (Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes\n\n\t p. LogLevel - Please add log level or log severity value. By default it is set to INFO\n\n\t q. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 February 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastAT/Mimecast_AT_FunctionApp.json","true" +"Awareness_SafeScore_Details_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastATAPI","Mimecast","Mimecast Awareness Training","The data connector for [Mimecast Awareness Training](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- Performance Details
- Safe Score Details
- User Data
- Watchlist Details
","[{""title"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""title"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. Client Secret\n5. Entra Object ID""}, {""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Mimecast Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Mimecast Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Mimecast Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 4 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available.""}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Mimecast Awareness Training Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastAT-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastAT-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the below information : \n\n\t a. Location - The location in which the data collection rules and data collection endpoints should be deployed\n\n\t b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace\n\n\t c. AzureClientID - Enter Azure Client ID that you have created during app registration\n\n\t d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret\n\n\t e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory\n\n\t f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App\n\n\t g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com)\n\n\t h. MimecastClientID - Enter Mimecast Client ID for authentication\n\n\t i. MimecastClientSecret - Enter Mimecast Client Secret for authentication\n\n\t j. MimecastAwarenessPerformanceDetailsTableName - Enter name of the table used to store Awareness Performance Details data. Default is 'Awareness_Performance_Details'\n\n\t k. MimecastAwarenessUserDataTableName - Enter name of the table used to store Awareness User Data data. 
Default is 'Awareness_User_Data'\n\n\t l. MimecastAwarenessWatchlistDetailsTableName - Enter name of the table used to store Awareness Watchlist Details data. Default is 'Awareness_Watchlist_Details'\n\n\t m. MimecastAwarenessSafeScoreDetailsTableName - Enter name of the table used to store Awareness SafeScore Details data. Default is 'Awareness_SafeScore_Details'\n\n\t n. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted\n\n\t o. Schedule - Please enter a valid Quartz cron-expression. (Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes\n\n\t p. LogLevel - Please add log level or log severity value. By default it is set to INFO\n\n\t q. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 February 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastAT/Mimecast_AT_FunctionApp.json","true" +"Awareness_User_Data_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastATAPI","Mimecast","Mimecast Awareness Training","The data connector for [Mimecast Awareness Training](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- Performance Details
- Safe Score Details
- User Data
- Watchlist Details
","[{""title"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""title"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. Client Secret\n5. Entra Object ID""}, {""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Mimecast Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Mimecast Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Mimecast Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 4 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available.""}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Mimecast Awareness Training Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastAT-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastAT-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the below information : \n\n\t a. Location - The location in which the data collection rules and data collection endpoints should be deployed\n\n\t b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace\n\n\t c. AzureClientID - Enter Azure Client ID that you have created during app registration\n\n\t d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret\n\n\t e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory\n\n\t f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App\n\n\t g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com)\n\n\t h. MimecastClientID - Enter Mimecast Client ID for authentication\n\n\t i. MimecastClientSecret - Enter Mimecast Client Secret for authentication\n\n\t j. MimecastAwarenessPerformanceDetailsTableName - Enter name of the table used to store Awareness Performance Details data. Default is 'Awareness_Performance_Details'\n\n\t k. MimecastAwarenessUserDataTableName - Enter name of the table used to store Awareness User Data data. 
Default is 'Awareness_User_Data'\n\n\t l. MimecastAwarenessWatchlistDetailsTableName - Enter name of the table used to store Awareness Watchlist Details data. Default is 'Awareness_Watchlist_Details'\n\n\t m. MimecastAwarenessSafeScoreDetailsTableName - Enter name of the table used to store Awareness SafeScore Details data. Default is 'Awareness_SafeScore_Details'\n\n\t n. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted\n\n\t o. Schedule - Please enter a valid Quartz cron-expression. (Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes\n\n\t p. LogLevel - Please add log level or log severity value. By default it is set to INFO\n\n\t q. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 February 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastAT/Mimecast_AT_FunctionApp.json","true" +"Awareness_Watchlist_Details_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastATAPI","Mimecast","Mimecast Awareness Training","The data connector for [Mimecast Awareness Training](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- Performance Details
- Safe Score Details
- User Data
- Watchlist Details
","[{""title"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""title"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. Client Secret\n5. Entra Object ID""}, {""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Mimecast Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Mimecast Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Mimecast Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 4 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available.""}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Mimecast Awareness Training Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastAT-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastAT-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the below information : \n\n\t a. Location - The location in which the data collection rules and data collection endpoints should be deployed\n\n\t b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace\n\n\t c. AzureClientID - Enter Azure Client ID that you have created during app registration\n\n\t d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret\n\n\t e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory\n\n\t f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App\n\n\t g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com)\n\n\t h. MimecastClientID - Enter Mimecast Client ID for authentication\n\n\t i. MimecastClientSecret - Enter Mimecast Client Secret for authentication\n\n\t j. MimecastAwarenessPerformanceDetailsTableName - Enter name of the table used to store Awareness Performance Details data. Default is 'Awareness_Performance_Details'\n\n\t k. MimecastAwarenessUserDataTableName - Enter name of the table used to store Awareness User Data data. 
Default is 'Awareness_User_Data'\n\n\t l. MimecastAwarenessWatchlistDetailsTableName - Enter name of the table used to store Awareness Watchlist Details data. Default is 'Awareness_Watchlist_Details'\n\n\t m. MimecastAwarenessSafeScoreDetailsTableName - Enter name of the table used to store Awareness SafeScore Details data. Default is 'Awareness_SafeScore_Details'\n\n\t n. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted\n\n\t o. Schedule - Please enter a valid Quartz cron-expression. (Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes\n\n\t p. LogLevel - Please add log level or log severity value. By default it is set to INFO\n\n\t q. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 February 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastAT/Mimecast_AT_FunctionApp.json","true" +"Audit_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastAuditAPI","Mimecast","Mimecast Audit","The data connector for [Mimecast Audit](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to audit and authentication events within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into user activity, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
Audit
","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Configuration:"", ""description"": ""**STEP 1 - Configuration steps for the Mimecast API**\n\nGo to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later)""}, {""title"": """", ""description"": ""**STEP 2 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available.""}, {""title"": """", ""description"": ""**STEP 3 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. 
You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Mimecast Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 4 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Mimecast Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Mimecast Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 5 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": ""Deploy the Mimecast Audit Data Connector:"", ""description"": ""Use this method for automated deployment of the Mimecast Audit Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastAuditAzureDeploy-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastAuditAzureDeploy-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the below information : \n\n\t a. Location - The location in which the data collection rules and data collection endpoints should be deployed\n\n\t b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace\n\n\t c. AzureClientID - Enter Azure Client ID that you have created during app registration\n\n\t d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret\n\n\t e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory\n\n\t f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App\n\n\t g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com)\n\n\t h. MimecastClientID - Enter Mimecast Client ID for authentication\n\n\t i. MimecastClientSecret - Enter Mimecast Client Secret for authentication\n\n\t j. MimecastAuditTableName - Enter name of the table used to store Audit data. Default is 'Audit'\n\n\t k. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted\n\n\t l. Schedule - Please enter a valid Quartz cron-expression. (Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes\n\n\t m. 
LogLevel - Please add log level or log severity value. By default it is set to INFO\n\n\t n. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 February 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastAudit/Mimecast_Audit_FunctionApp.json","true" +"Cloud_Integrated_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastCIAPI","Mimecast","Mimecast Cloud Integrated","The data connector for [Mimecast Cloud Integrated](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Cloud Integrated inspection technologies within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.","[{""title"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""title"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. Client Secret""}, {""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Configuration:"", ""description"": ""**STEP 1 - Configuration steps for the Mimecast API**\n\nGo to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later)""}, {""title"": """", ""description"": ""**STEP 2 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available.""}, {""title"": """", ""description"": ""**STEP 3 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Mimecast Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 4 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Mimecast Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Mimecast Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 5 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Mimecast Cloud Integrated Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastCI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastCI-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the below information : \n\n\t a. Location - The location in which the data collection rules and data collection endpoints should be deployed\n\n\t b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace\n\n\t c. AzureClientID - Enter Azure Client ID that you have created during app registration\n\n\t d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret\n\n\t e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory\n\n\t f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App\n\n\t g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com)\n\n\t h. MimecastClientID - Enter Mimecast Client ID for authentication\n\n\t i. MimecastClientSecret - Enter Mimecast Client Secret for authentication\n\n\t j. MimecastCITableName - Enter name of the table used to store Cloud Integrated data. Default is 'Cloud_Integrated'\n\n\t k. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted\n\n\t l. Schedule - Please enter a valid Quartz cron-expression. (Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes\n\n\t m. 
LogLevel - Please add log level or log severity value. By default it is set to INFO\n\n\t n. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 February 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastCloudIntegrated/Mimecast_Cloud_Integrated_FunctionApp.json","true" +"Seg_Cg_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastSEGAPI","Mimecast","Mimecast Secure Email Gateway","The data connector for [Mimecast Secure Email Gateway](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) allows easy log collection from the Secure Email Gateway to surface email insight and user activity within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities. Mimecast products and features required:
- Mimecast Cloud Gateway
- Mimecast Data Leak Prevention
","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Configuration:"", ""description"": ""**STEP 1 - Configuration steps for the Mimecast API**\n\nGo to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later)""}, {""title"": """", ""description"": ""****STEP 2 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available.""}, {""title"": """", ""description"": ""**STEP 3 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. 
You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Mimecast Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 4 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Mimecast Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Mimecast Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 5 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": ""Deploy the Mimecast Secure Email Gateway Data Connector:"", ""description"": ""Use this method for automated deployment of the Mimecast Secure Email Gateway Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastSEGAzureDeploy-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastSEGAzureDeploy-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the below information : \n\n\t a. Location - The location in which the data collection rules and data collection endpoints should be deployed\n\n\t b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace\n\n\t c. AzureClientID - Enter Azure Client ID that you have created during app registration\n\n\t d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret\n\n\t e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory\n\n\t f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App\n\n\t g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com)\n\n\t h. MimecastClientID - Enter Mimecast Client ID for authentication\n\n\t i. MimecastClientSecret - Enter Mimecast Client Secret for authentication\n\n\t j. MimecastCGTableName - Enter name of the table used to store CG data. Default is 'Seg_Cg'\n\n\t k. MimecastDLPTableName - Enter name of the table used to store DLP data. Default is 'Seg_Dlp'\n\n\t l. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted\n\n\t m. Schedule - Please enter a valid Quartz cron-expression. 
(Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes\n\n\t n. LogLevel - Please add log level or log severity value. By default it is set to INFO\n\n\t o. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 February 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastSEG/Mimecast_SEG_FunctionApp.json","true" +"Seg_Dlp_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastSEGAPI","Mimecast","Mimecast Secure Email Gateway","The data connector for [Mimecast Secure Email Gateway](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) allows easy log collection from the Secure Email Gateway to surface email insight and user activity within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities. Mimecast products and features required:
- Mimecast Cloud Gateway
- Mimecast Data Leak Prevention
","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Configuration:"", ""description"": ""**STEP 1 - Configuration steps for the Mimecast API**\n\nGo to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later)""}, {""title"": """", ""description"": ""**STEP 2 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available.""}, {""title"": """", ""description"": ""**STEP 3 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. 
You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TenableVM Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 4 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TenableVM Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TenableVM Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 5 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": ""Deploy the Mimecast Secure Email Gateway Data Connector:"", ""description"": ""Use this method for automated deployment of the Mimecast Secure Email Gateway Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastSEGAzureDeploy-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastSEGAzureDeploy-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the below information : \n\n\t a. Location - The location in which the data collection rules and data collection endpoints should be deployed\n\n\t b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace\n\n\t c. AzureClientID - Enter Azure Client ID that you have created during app registration\n\n\t d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret\n\n\t e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory\n\n\t f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App\n\n\t g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com)\n\n\t h. MimecastClientID - Enter Mimecast Client ID for authentication\n\n\t i. MimecastClientSecret - Enter Mimecast Client Secret for authentication\n\n\t j. MimecastCGTableName - Enter name of the table used to store CG data. Default is 'Seg_Cg'\n\n\t k. MimecastDLPTableName - Enter name of the table used to store DLP data. Default is 'Seg_Dlp'\n\n\t l. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted\n\n\t m. Schedule - Please enter a valid Quartz cron-expression. 
(Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes\n\n\t n. LogLevel - Please add log level or log severity value. By default it is set to INFO\n\n\t o. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 February 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastSEG/Mimecast_SEG_FunctionApp.json","true" +"Ttp_Attachment_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastTTPAPI","Mimecast","Mimecast Targeted Threat Protection","The data connector for [Mimecast Targeted Threat Protection](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- URL Protect
- Impersonation Protect
- Attachment Protect
","[{""title"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""title"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. Client Secret""}, {""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Mimecast Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Mimecast Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Mimecast Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 4 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available.""}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Mimecast Targeted Threat Protection Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastTTPAzureDeploy-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastTTPAzureDeploy-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the below information : \n\n\t a. Location - The location in which the data collection rules and data collection endpoints should be deployed\n\n\t b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace\n\n\t c. AzureClientID - Enter Azure Client ID that you have created during app registration\n\n\t d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret\n\n\t e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory\n\n\t f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App\n\n\t g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com)\n\n\t h. MimecastClientID - Enter Mimecast Client ID for authentication\n\n\t i. MimecastClientSecret - Enter Mimecast Client Secret for authentication\n\n\t j. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted\n\n\t k. 
MimecastTTPAttachmentTableName - Enter name of the table used to store TTP Attachment data. Default is 'Ttp_Attachment'\n\n\t l. MimecastTTPImpersonationTableName - Enter name of the table used to store TTP Impersonation data. Default is 'Ttp_Impersonation'\n\n\t m. MimecastTTPUrlTableName - Enter name of the table used to store TTP URL data. Default is 'Ttp_Url'\n\n\t n. Schedule - Please enter a valid Quartz cron-expression. (Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes\n\n\t o. LogLevel - Please add log level or log severity value. By default it is set to INFO\n\n\t p. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 February 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastTTP/Mimecast_TTP_FunctionApp.json","true" +"Ttp_Impersonation_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastTTPAPI","Mimecast","Mimecast Targeted Threat Protection","The data connector for [Mimecast Targeted Threat Protection](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- URL Protect
- Impersonation Protect
- Attachment Protect
","[{""title"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""title"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. Client Secret""}, {""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Mimecast Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Mimecast Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Mimecast Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 4 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available.""}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Mimecast Targeted Threat Protection Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastTTPAzureDeploy-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastTTPAzureDeploy-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the below information : \n\n\t a. Location - The location in which the data collection rules and data collection endpoints should be deployed\n\n\t b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace\n\n\t c. AzureClientID - Enter Azure Client ID that you have created during app registration\n\n\t d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret\n\n\t e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory\n\n\t f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App\n\n\t g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com)\n\n\t h. MimecastClientID - Enter Mimecast Client ID for authentication\n\n\t i. MimecastClientSecret - Enter Mimecast Client Secret for authentication\n\n\t j. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted\n\n\t k. 
MimecastTTPAttachmentTableName - Enter name of the table used to store TTP Attachment data. Default is 'Ttp_Attachment'\n\n\t l. MimecastTTPImpersonationTableName - Enter name of the table used to store TTP Impersonation data. Default is 'Ttp_Impersonation'\n\n\t m. MimecastTTPUrlTableName - Enter name of the table used to store TTP URL data. Default is 'Ttp_Url'\n\n\t n. Schedule - Please enter a valid Quartz cron-expression. (Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes\n\n\t o. LogLevel - Please add log level or log severity value. By default it is set to INFO\n\n\t p. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 February 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastTTP/Mimecast_TTP_FunctionApp.json","true" +"Ttp_Url_CL","Mimecast","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecast","2024-09-10","2024-09-10","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastTTPAPI","Mimecast","Mimecast Targeted Threat Protection","The data connector for [Mimecast Targeted Threat Protection](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- URL Protect
- Impersonation Protect
- Attachment Protect
","[{""title"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""title"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. Client Secret""}, {""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of Mimecast Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Mimecast Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Mimecast Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 4 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Mimecast API authorization key(s) or Token, readily available.""}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Mimecast Targeted Threat Protection Data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastTTPAzureDeploy-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-MimecastTTPAzureDeploy-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n3. Enter the below information : \n\n\t a. Location - The location in which the data collection rules and data collection endpoints should be deployed\n\n\t b. WorkspaceName - Enter Microsoft Sentinel Workspace Name of Log Analytics workspace\n\n\t c. AzureClientID - Enter Azure Client ID that you have created during app registration\n\n\t d. AzureClientSecret - Enter Azure Client Secret that you have created during creating the client secret\n\n\t e. AzureTenantID - Enter Azure Tenant ID of your Azure Active Directory\n\n\t f. AzureEntraObjectID - Enter Object id of your Microsoft Entra App\n\n\t g. MimecastBaseURL - Enter Base URL of Mimecast API 2.0 (e.g. https://api.services.mimecast.com)\n\n\t h. MimecastClientID - Enter Mimecast Client ID for authentication\n\n\t i. MimecastClientSecret - Enter Mimecast Client Secret for authentication\n\n\t j. StartDate - Enter the start date in the 'yyyy-mm-dd' format. If you do not provide a date, data from the last 60 days will be fetched automatically. Ensure that the date is in the past and properly formatted\n\n\t k. 
MimecastTTPAttachmentTableName - Enter name of the table used to store TTP Attachment data. Default is 'Ttp_Attachment'\n\n\t l. MimecastTTPImpersonationTableName - Enter name of the table used to store TTP Impersonation data. Default is 'Ttp_Impersonation'\n\n\t m. MimecastTTPUrlTableName - Enter name of the table used to store TTP Attachment data. Default is 'Ttp_Url'\n\n\t n. Schedule - Please enter a valid Quartz cron-expression. (Example: 0 0 */1 * * *) Do not keep the value empty, minimum value is 10 minutes\n\n\t l. LogLevel - Please add log level or log severity value. By default it is set to INFO\n\n\t o. AppInsightsWorkspaceResourceId - Migrate Classic Application Insights to Log Analytic Workspace which is retiring by 29 Febraury 2024. Use 'Log Analytic Workspace-->Properties' blade having 'Resource ID' property value. This is a fully qualified resourceId which is in format '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}' \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""See the documentation to learn more about API on the [Rest API reference](https://integrations.mimecast.com/documentation/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mimecast/Data%20Connectors/MimecastTTP/Mimecast_TTP_FunctionApp.json","true" +"MimecastAudit_CL","MimecastAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastAudit","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecastaudit","2022-02-24","2022-02-24","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastAuditAPI","Mimecast","Mimecast Audit & Authentication","The data connector for [Mimecast Audit & Authentication](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to audit and authentication events within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into user activity, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
Audit & Authentication
","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Configuration:"", ""description"": ""**STEP 1 - Configuration steps for the Mimecast API**\n\nGo to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later)""}, {""title"": """", ""description"": ""**STEP 2 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Mimecast API authorization key(s) or Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Deploy the Mimecast Audit & Authentication Data Connector:"", ""description"": ""\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastAudit-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the following fields:\n - appName: Unique string that will be used as id for the app in Azure platform\n - objectId: Azure portal ---> Azure Active Directory ---> more info ---> Profile -----> Object ID\n - appInsightsLocation(default): westeurope\n - mimecastEmail: Email address of dedicated user for this integraion\n - mimecastPassword: Password for dedicated user\n - mimecastAppId: Application Id from the Microsoft Sentinel app registered with Mimecast\n - mimecastAppKey: Application Key from the Microsoft Sentinel app registered with Mimecast\n - mimecastAccessKey: Access Key for the dedicated Mimecast user\n - mimecastSecretKey: Secret Key for dedicated Mimecast user\n - mimecastBaseURL: Regional Mimecast API Base URL\n - activeDirectoryAppId: Azure portal ---> App registrations ---> [your_app] ---> Application ID\n - activeDirectoryAppSecret: Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> [your_app_secret]\n - workspaceId: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Workspace ID (or you can copy workspaceId from above) \n - workspaceKey: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Primary Key (or you can copy workspaceKey from above) \n - AppInsightsWorkspaceResourceID : Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Properties ---> Resource ID \n\n >Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy. \n\n6. 
Go to ***Azure portal ---> Resource groups ---> [your_resource_group] ---> [appName](type: Storage account) ---> Storage Explorer ---> BLOB CONTAINERS ---> Audit checkpoints ---> Upload*** and create empty file on your machine named checkpoint.txt and select it for upload (this is done so that date_range for SIEM logs is stored in consistent state)\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Mimecast API credentials"", ""description"": ""You need to have the following pieces of information to configure the integration:\n- mimecastEmail: Email address of a dedicated Mimecast admin user\n- mimecastPassword: Password for the dedicated Mimecast admin user\n- mimecastAppId: API Application Id of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAppKey: API Application Key of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAccessKey: Access Key for the dedicated Mimecast admin user\n- mimecastSecretKey: Secret Key for the dedicated Mimecast admin user\n- mimecastBaseURL: Mimecast Regional API Base URL\n\n> The Mimecast Application Id, Application Key, along with the Access Key and Secret keys for the dedicated Mimecast admin user are obtainable via the Mimecast Administration Console: Administration | Services | API and Platform Integrations.\n\n> The Mimecast API Base URL for each region is documented here: https://integrations.mimecast.com/documentation/api-overview/global-base-urls/""}, {""name"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""name"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. 
Client Secret""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastAudit/Data%20Connectors/MimecastAudit_API_AzureFunctionApp.json","true" +"MimecastDLP_CL","MimecastSEG","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastSEG","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecastseg","2022-02-24","2022-02-24","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastSIEMAPI","Mimecast","Mimecast Secure Email Gateway","The data connector for [Mimecast Secure Email Gateway](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) allows easy log collection from the Secure Email Gateway to surface email insight and user activity within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities. Mimecast products and features required:
- Mimecast Secure Email Gateway
- Mimecast Data Leak Prevention
","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Configuration:"", ""description"": ""**STEP 1 - Configuration steps for the Mimecast API**\n\nGo to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later)""}, {""title"": """", ""description"": ""**STEP 2 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Mimecast API authorization key(s) or Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Deploy the Mimecast Secure Email Gateway Data Connector:"", ""description"": ""\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastSEG-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the following fields:\n - appName: Unique string that will be used as id for the app in Azure platform\n - objectId: Azure portal ---> Azure Active Directory ---> more info ---> Profile -----> Object ID\n - appInsightsLocation(default): westeurope\n - mimecastEmail: Email address of dedicated user for this integraion\n - mimecastPassword: Password for dedicated user\n - mimecastAppId: Application Id from the Microsoft Sentinel app registered with Mimecast\n - mimecastAppKey: Application Key from the Microsoft Sentinel app registered with Mimecast\n - mimecastAccessKey: Access Key for the dedicated Mimecast user\n - mimecastSecretKey: Secret Key for dedicated Mimecast user\n - mimecastBaseURL: Regional Mimecast API Base URL\n - activeDirectoryAppId: Azure portal ---> App registrations ---> [your_app] ---> Application ID\n - activeDirectoryAppSecret: Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> [your_app_secret]\n - workspaceId: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Workspace ID (or you can copy workspaceId from above) \n - workspaceKey: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Primary Key (or you can copy workspaceKey from above) \n - AppInsightsWorkspaceResourceID : Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Properties ---> Resource ID \n\n >Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n\n6. 
Go to ***Azure portal ---> Resource groups ---> [your_resource_group] ---> [appName](type: Storage account) ---> Storage Explorer ---> BLOB CONTAINERS ---> SIEM checkpoints ---> Upload*** and create empty file on your machine named checkpoint.txt, dlp-checkpoint.txt and select it for upload (this is done so that date_range for SIEM logs is stored in consistent state)\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Mimecast API credentials"", ""description"": ""You need to have the following pieces of information to configure the integration:\n- mimecastEmail: Email address of a dedicated Mimecast admin user\n- mimecastPassword: Password for the dedicated Mimecast admin user\n- mimecastAppId: API Application Id of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAppKey: API Application Key of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAccessKey: Access Key for the dedicated Mimecast admin user\n- mimecastSecretKey: Secret Key for the dedicated Mimecast admin user\n- mimecastBaseURL: Mimecast Regional API Base URL\n\n> The Mimecast Application Id, Application Key, along with the Access Key and Secret keys for the dedicated Mimecast admin user are obtainable via the Mimecast Administration Console: Administration | Services | API and Platform Integrations.\n\n> The Mimecast API Base URL for each region is documented here: https://integrations.mimecast.com/documentation/api-overview/global-base-urls/""}, {""name"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""name"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. 
Client Secret""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastSEG/Data%20Connectors/MimecastSEG_API_AzureFunctionApp.json","true" +"MimecastSIEM_CL","MimecastSEG","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastSEG","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecastseg","2022-02-24","2022-02-24","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastSIEMAPI","Mimecast","Mimecast Secure Email Gateway","The data connector for [Mimecast Secure Email Gateway](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) allows easy log collection from the Secure Email Gateway to surface email insight and user activity within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities. Mimecast products and features required:
- Mimecast Secure Email Gateway
- Mimecast Data Leak Prevention
","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Configuration:"", ""description"": ""**STEP 1 - Configuration steps for the Mimecast API**\n\nGo to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later)""}, {""title"": """", ""description"": ""**STEP 2 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Mimecast API authorization key(s) or Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Deploy the Mimecast Secure Email Gateway Data Connector:"", ""description"": ""\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastSEG-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the following fields:\n - appName: Unique string that will be used as id for the app in Azure platform\n - objectId: Azure portal ---> Azure Active Directory ---> more info ---> Profile -----> Object ID\n - appInsightsLocation(default): westeurope\n - mimecastEmail: Email address of dedicated user for this integraion\n - mimecastPassword: Password for dedicated user\n - mimecastAppId: Application Id from the Microsoft Sentinel app registered with Mimecast\n - mimecastAppKey: Application Key from the Microsoft Sentinel app registered with Mimecast\n - mimecastAccessKey: Access Key for the dedicated Mimecast user\n - mimecastSecretKey: Secret Key for dedicated Mimecast user\n - mimecastBaseURL: Regional Mimecast API Base URL\n - activeDirectoryAppId: Azure portal ---> App registrations ---> [your_app] ---> Application ID\n - activeDirectoryAppSecret: Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> [your_app_secret]\n - workspaceId: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Workspace ID (or you can copy workspaceId from above) \n - workspaceKey: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Primary Key (or you can copy workspaceKey from above) \n - AppInsightsWorkspaceResourceID : Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Properties ---> Resource ID \n\n >Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n\n6. 
Go to ***Azure portal ---> Resource groups ---> [your_resource_group] ---> [appName](type: Storage account) ---> Storage Explorer ---> BLOB CONTAINERS ---> SIEM checkpoints ---> Upload*** and create empty file on your machine named checkpoint.txt, dlp-checkpoint.txt and select it for upload (this is done so that date_range for SIEM logs is stored in consistent state)\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Mimecast API credentials"", ""description"": ""You need to have the following pieces of information to configure the integration:\n- mimecastEmail: Email address of a dedicated Mimecast admin user\n- mimecastPassword: Password for the dedicated Mimecast admin user\n- mimecastAppId: API Application Id of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAppKey: API Application Key of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAccessKey: Access Key for the dedicated Mimecast admin user\n- mimecastSecretKey: Secret Key for the dedicated Mimecast admin user\n- mimecastBaseURL: Mimecast Regional API Base URL\n\n> The Mimecast Application Id, Application Key, along with the Access Key and Secret keys for the dedicated Mimecast admin user are obtainable via the Mimecast Administration Console: Administration | Services | API and Platform Integrations.\n\n> The Mimecast API Base URL for each region is documented here: https://integrations.mimecast.com/documentation/api-overview/global-base-urls/""}, {""name"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""name"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. 
Client Secret""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastSEG/Data%20Connectors/MimecastSEG_API_AzureFunctionApp.json","true" +"Event","MimecastTIRegional","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTIRegional","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecasttiregional","2023-08-23","2023-09-11","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastTIRegionalConnectorAzureFunctions","Mimecast","Mimecast Intelligence for Microsoft - Microsoft Sentinel","The data connector for Mimecast Intelligence for Microsoft provides regional threat intelligence curated from Mimecast’s email inspection technologies with pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times.
Mimecast products and features required:
- Mimecast Secure Email Gateway
- Mimecast Threat Intelligence
","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Configuration:"", ""description"": ""**STEP 1 - Configuration steps for the Mimecast API**\n\nGo to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later)""}, {""title"": """", ""description"": ""**STEP 2 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Mimecast API authorization key(s) or Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Enable Mimecast Intelligence for Microsoft - Microsoft Sentinel Connector:"", ""description"": ""\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastTIRegional-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the following fields:\n - appName: Unique string that will be used as id for the app in Azure platform\n - objectId: Azure portal ---> Azure Active Directory ---> more info ---> Profile -----> Object ID\n - appInsightsLocation(default): westeurope\n - mimecastEmail: Email address of dedicated user for this integration\n - mimecastPassword: Password for dedicated user\n - mimecastAppId: Application Id from the Microsoft Sentinel app registered with Mimecast\n - mimecastAppKey: Application Key from the Microsoft Sentinel app registered with Mimecast\n - mimecastAccessKey: Access Key for the dedicated Mimecast user\n - mimecastSecretKey: Secret Key for dedicated Mimecast user\n - mimecastBaseURL: Regional Mimecast API Base URL\n - activeDirectoryAppId: Azure portal ---> App registrations ---> [your_app] ---> Application ID\n - activeDirectoryAppSecret: Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> [your_app_secret]\n - workspaceId: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Workspace ID (or you can copy workspaceId from above) \n - workspaceKey: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Primary Key (or you can copy workspaceKey from above) \n - AppInsightsWorkspaceResourceID : Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Properties ---> Resource ID \n\n >Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n\n6. 
Go to ***Azure portal ---> Resource groups ---> [your_resource_group] ---> [appName](type: Storage account) ---> Storage Explorer ---> BLOB CONTAINERS ---> TIR checkpoints ---> Upload*** and create empty file on your machine named checkpoint.txt and select it for upload (this is done so that date_range for TIR logs is stored in consistent state)\n""}, {""title"": ""Additional configuration:"", ""description"": "">Connect to a **Threat Intelligence Platforms** Data Connector. Follow instructions on the connector page and then click connect button.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Mimecast API credentials"", ""description"": ""You need to have the following pieces of information to configure the integration:\n- mimecastEmail: Email address of a dedicated Mimecast admin user\n- mimecastPassword: Password for the dedicated Mimecast admin user\n- mimecastAppId: API Application Id of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAppKey: API Application Key of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAccessKey: Access Key for the dedicated Mimecast admin user\n- mimecastSecretKey: Secret Key for the dedicated Mimecast admin user\n- mimecastBaseURL: Mimecast Regional API Base URL\n\n> The Mimecast Application Id, Application Key, along with the Access Key and Secret keys for the dedicated Mimecast admin user are obtainable via the Mimecast Administration Console: Administration | Services | API and Platform Integrations.\n\n> The Mimecast API Base URL for each region is documented here: https://integrations.mimecast.com/documentation/api-overview/global-base-urls/""}, {""name"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""name"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. 
Client Secret""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTIRegional/Data%20Connectors/MimecastTIRegional_API_AzureFunctionApp.json","true" +"ThreatIntelligenceIndicator","MimecastTIRegional","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTIRegional","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecasttiregional","2023-08-23","2023-09-11","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastTIRegionalConnectorAzureFunctions","Mimecast","Mimecast Intelligence for Microsoft - Microsoft Sentinel","The data connector for Mimecast Intelligence for Microsoft provides regional threat intelligence curated from Mimecast’s email inspection technologies with pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times.
Mimecast products and features required:
- Mimecast Secure Email Gateway
- Mimecast Threat Intelligence
","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Configuration:"", ""description"": ""**STEP 1 - Configuration steps for the Mimecast API**\n\nGo to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later)""}, {""title"": """", ""description"": ""**STEP 2 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Mimecast API authorization key(s) or Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Enable Mimecast Intelligence for Microsoft - Microsoft Sentinel Connector:"", ""description"": ""\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastTIRegional-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the following fields:\n - appName: Unique string that will be used as id for the app in Azure platform\n - objectId: Azure portal ---> Azure Active Directory ---> more info ---> Profile -----> Object ID\n - appInsightsLocation(default): westeurope\n - mimecastEmail: Email address of dedicated user for this integration\n - mimecastPassword: Password for dedicated user\n - mimecastAppId: Application Id from the Microsoft Sentinel app registered with Mimecast\n - mimecastAppKey: Application Key from the Microsoft Sentinel app registered with Mimecast\n - mimecastAccessKey: Access Key for the dedicated Mimecast user\n - mimecastSecretKey: Secret Key for dedicated Mimecast user\n - mimecastBaseURL: Regional Mimecast API Base URL\n - activeDirectoryAppId: Azure portal ---> App registrations ---> [your_app] ---> Application ID\n - activeDirectoryAppSecret: Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> [your_app_secret]\n - workspaceId: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Workspace ID (or you can copy workspaceId from above) \n - workspaceKey: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Primary Key (or you can copy workspaceKey from above) \n - AppInsightsWorkspaceResourceID : Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Properties ---> Resource ID \n\n >Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n\n6. 
Go to ***Azure portal ---> Resource groups ---> [your_resource_group] ---> [appName](type: Storage account) ---> Storage Explorer ---> BLOB CONTAINERS ---> TIR checkpoints ---> Upload*** and create empty file on your machine named checkpoint.txt and select it for upload (this is done so that date_range for TIR logs is stored in consistent state)\n""}, {""title"": ""Additional configuration:"", ""description"": "">Connect to a **Threat Intelligence Platforms** Data Connector. Follow instructions on the connector page and then click connect button.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Mimecast API credentials"", ""description"": ""You need to have the following pieces of information to configure the integration:\n- mimecastEmail: Email address of a dedicated Mimecast admin user\n- mimecastPassword: Password for the dedicated Mimecast admin user\n- mimecastAppId: API Application Id of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAppKey: API Application Key of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAccessKey: Access Key for the dedicated Mimecast admin user\n- mimecastSecretKey: Secret Key for the dedicated Mimecast admin user\n- mimecastBaseURL: Mimecast Regional API Base URL\n\n> The Mimecast Application Id, Application Key, along with the Access Key and Secret keys for the dedicated Mimecast admin user are obtainable via the Mimecast Administration Console: Administration | Services | API and Platform Integrations.\n\n> The Mimecast API Base URL for each region is documented here: https://integrations.mimecast.com/documentation/api-overview/global-base-urls/""}, {""name"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""name"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. 
Client Secret""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTIRegional/Data%20Connectors/MimecastTIRegional_API_AzureFunctionApp.json","true" +"MimecastTTPAttachment_CL","MimecastTTP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTTP","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecastttp","2022-02-24","2022-02-24","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastTTPAPI","Mimecast","Mimecast Targeted Threat Protection","The data connector for [Mimecast Targeted Threat Protection](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- URL Protect
- Impersonation Protect
- Attachment Protect
","[{""title"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""title"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. Client Secret""}, {""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Configuration:"", ""description"": ""**STEP 1 - Configuration steps for the Mimecast API**\n\nGo to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later)""}, {""title"": """", ""description"": ""**STEP 2 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Mimecast API authorization key(s) or Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Deploy the Mimecast Targeted Threat Protection Data 
Connector:"", ""description"": ""\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastTTP-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the following fields:\n - appName: Unique string that will be used as id for the app in Azure platform\n - objectId: Azure portal ---> Azure Active Directory ---> more info ---> Profile -----> Object ID\n - appInsightsLocation(default): westeurope\n - mimecastEmail: Email address of dedicated user for this integration\n - mimecastPassword: Password for dedicated user\n - mimecastAppId: Application Id from the Microsoft Sentinel app registered with Mimecast\n - mimecastAppKey: Application Key from the Microsoft Sentinel app registered with Mimecast\n - mimecastAccessKey: Access Key for the dedicated Mimecast user\n - mimecastSecretKey: Secret Key for dedicated Mimecast user\n - mimecastBaseURL: Regional Mimecast API Base URL\n - activeDirectoryAppId: Azure portal ---> App registrations ---> [your_app] ---> Application ID\n - activeDirectoryAppSecret: Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> [your_app_secret]\n - workspaceId: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Workspace ID (or you can copy workspaceId from above) \n - workspaceKey: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Primary Key (or you can copy workspaceKey from above) \n - AppInsightsWorkspaceResourceID : Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Properties ---> Resource ID \n\n >Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n\n6. Go to ***Azure portal ---> Resource groups ---> [your_resource_group] ---> [appName](type: Storage account) ---> Storage Explorer ---> BLOB CONTAINERS ---> TTP checkpoints ---> Upload*** and create empty files on your machine named attachment-checkpoint.txt, impersonation-checkpoint.txt, url-checkpoint.txt and select them for upload (this is done so that date_range for TTP logs are stored in consistent state)\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""You need to have the following pieces of information to configure the integration:\n- mimecastEmail: Email address of a dedicated Mimecast admin user\n- mimecastPassword: Password for the dedicated Mimecast admin user\n- mimecastAppId: API Application Id of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAppKey: API Application Key of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAccessKey: Access Key for the dedicated Mimecast admin user\n- mimecastSecretKey: Secret Key for the dedicated Mimecast admin user\n- mimecastBaseURL: Mimecast Regional API Base URL\n\n> The Mimecast Application Id, Application Key, along with the Access Key and Secret keys for the dedicated Mimecast admin user are obtainable via the Mimecast Administration Console: Administration | Services | API and Platform Integrations.\n\n> The Mimecast API Base URL for each region is documented here: https://integrations.mimecast.com/documentation/api-overview/global-base-urls/""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTTP/Data%20Connectors/MimecastTTP_API_FunctionApp.json","true" +"MimecastTTPImpersonation_CL","MimecastTTP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTTP","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecastttp","2022-02-24","2022-02-24","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastTTPAPI","Mimecast","Mimecast Targeted Threat Protection","The data connector for [Mimecast Targeted Threat Protection](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. 
The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- URL Protect
- Impersonation Protect
- Attachment Protect
","[{""title"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""title"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. Client Secret""}, {""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Configuration:"", ""description"": ""**STEP 1 - Configuration steps for the Mimecast API**\n\nGo to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later)""}, {""title"": """", ""description"": ""**STEP 2 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Mimecast API authorization key(s) or Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Deploy the Mimecast Targeted Threat Protection Data 
Connector:"", ""description"": ""\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastTTP-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the following fields:\n - appName: Unique string that will be used as id for the app in Azure platform\n - objectId: Azure portal ---> Azure Active Directory ---> more info ---> Profile -----> Object ID\n - appInsightsLocation(default): westeurope\n - mimecastEmail: Email address of dedicated user for this integration\n - mimecastPassword: Password for dedicated user\n - mimecastAppId: Application Id from the Microsoft Sentinel app registered with Mimecast\n - mimecastAppKey: Application Key from the Microsoft Sentinel app registered with Mimecast\n - mimecastAccessKey: Access Key for the dedicated Mimecast user\n - mimecastSecretKey: Secret Key for dedicated Mimecast user\n - mimecastBaseURL: Regional Mimecast API Base URL\n - activeDirectoryAppId: Azure portal ---> App registrations ---> [your_app] ---> Application ID\n - activeDirectoryAppSecret: Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> [your_app_secret]\n - workspaceId: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Workspace ID (or you can copy workspaceId from above) \n - workspaceKey: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Primary Key (or you can copy workspaceKey from above) \n - AppInsightsWorkspaceResourceID : Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Properties ---> Resource ID \n\n >Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n\n6. Go to ***Azure portal ---> Resource groups ---> [your_resource_group] ---> [appName](type: Storage account) ---> Storage Explorer ---> BLOB CONTAINERS ---> TTP checkpoints ---> Upload*** and create empty files on your machine named attachment-checkpoint.txt, impersonation-checkpoint.txt, url-checkpoint.txt and select them for upload (this is done so that date_range for TTP logs are stored in consistent state)\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""You need to have the following pieces of information to configure the integration:\n- mimecastEmail: Email address of a dedicated Mimecast admin user\n- mimecastPassword: Password for the dedicated Mimecast admin user\n- mimecastAppId: API Application Id of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAppKey: API Application Key of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAccessKey: Access Key for the dedicated Mimecast admin user\n- mimecastSecretKey: Secret Key for the dedicated Mimecast admin user\n- mimecastBaseURL: Mimecast Regional API Base URL\n\n> The Mimecast Application Id, Application Key, along with the Access Key and Secret keys for the dedicated Mimecast admin user are obtainable via the Mimecast Administration Console: Administration | Services | API and Platform Integrations.\n\n> The Mimecast API Base URL for each region is documented here: https://integrations.mimecast.com/documentation/api-overview/global-base-urls/""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTTP/Data%20Connectors/MimecastTTP_API_FunctionApp.json","true" +"MimecastTTPUrl_CL","MimecastTTP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTTP","mimecastnorthamerica1584469118674","azure-sentinel-solution-mimecastttp","2022-02-24","2022-02-24","","Mimecast","Partner","https://mimecastsupport.zendesk.com/","","domains","MimecastTTPAPI","Mimecast","Mimecast Targeted Threat Protection","The data connector for [Mimecast Targeted Threat Protection](https://integrations.mimecast.com/tech-partners/microsoft-sentinel/) provides customers with the visibility into security events related to the Targeted Threat Protection inspection technologies within Microsoft Sentinel. 
The data connector provides pre-created dashboards to allow analysts to view insight into email based threats, aid in incident correlation and reduce investigation response times coupled with custom alert capabilities.
The Mimecast products included within the connector are:
- URL Protect
- Impersonation Protect
- Attachment Protect
","[{""title"": ""Resource group"", ""description"": ""You need to have a resource group created with a subscription you are going to use.""}, {""title"": ""Functions app"", ""description"": ""You need to have an Azure App registered for this connector to use\n1. Application Id\n2. Tenant Id\n3. Client Id\n4. Client Secret""}, {""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to a Mimecast API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": ""Configuration:"", ""description"": ""**STEP 1 - Configuration steps for the Mimecast API**\n\nGo to ***Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> New client secret*** and create a new secret (save the Value somewhere safe right away because you will not be able to preview it later)""}, {""title"": """", ""description"": ""**STEP 2 - Deploy Mimecast API Connector**\n\n>**IMPORTANT:** Before deploying the Mimecast API connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Mimecast API authorization key(s) or Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Deploy the Mimecast Targeted Threat Protection Data 
Connector:"", ""description"": ""\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MimecastTTP-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the following fields:\n - appName: Unique string that will be used as id for the app in Azure platform\n - objectId: Azure portal ---> Azure Active Directory ---> more info ---> Profile -----> Object ID\n - appInsightsLocation(default): westeurope\n - mimecastEmail: Email address of dedicated user for this integration\n - mimecastPassword: Password for dedicated user\n - mimecastAppId: Application Id from the Microsoft Sentinel app registered with Mimecast\n - mimecastAppKey: Application Key from the Microsoft Sentinel app registered with Mimecast\n - mimecastAccessKey: Access Key for the dedicated Mimecast user\n - mimecastSecretKey: Secret Key for dedicated Mimecast user\n - mimecastBaseURL: Regional Mimecast API Base URL\n - activeDirectoryAppId: Azure portal ---> App registrations ---> [your_app] ---> Application ID\n - activeDirectoryAppSecret: Azure portal ---> App registrations ---> [your_app] ---> Certificates & secrets ---> [your_app_secret]\n - workspaceId: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Workspace ID (or you can copy workspaceId from above) \n - workspaceKey: Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Agents ---> Primary Key (or you can copy workspaceKey from above) \n - AppInsightsWorkspaceResourceID : Azure portal ---> Log Analytics Workspaces ---> [Your workspace] ---> Properties ---> Resource ID \n\n >Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n\n6. Go to ***Azure portal ---> Resource groups ---> [your_resource_group] ---> [appName](type: Storage account) ---> Storage Explorer ---> BLOB CONTAINERS ---> TTP checkpoints ---> Upload*** and create empty files on your machine named attachment-checkpoint.txt, impersonation-checkpoint.txt, url-checkpoint.txt and select them for upload (this is done so that date_range for TTP logs are stored in consistent state)\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""You need to have the following pieces of information to configure the integration:\n- mimecastEmail: Email address of a dedicated Mimecast admin user\n- mimecastPassword: Password for the dedicated Mimecast admin user\n- mimecastAppId: API Application Id of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAppKey: API Application Key of the Mimecast Microsoft Sentinel app registered with Mimecast\n- mimecastAccessKey: Access Key for the dedicated Mimecast admin user\n- mimecastSecretKey: Secret Key for the dedicated Mimecast admin user\n- mimecastBaseURL: Mimecast Regional API Base URL\n\n> The Mimecast Application Id, Application Key, along with the Access Key and Secret keys for the dedicated Mimecast admin user are obtainable via the Mimecast Administration Console: Administration | Services | API and Platform Integrations.\n\n> The Mimecast API Base URL for each region is documented here: https://integrations.mimecast.com/documentation/api-overview/global-base-urls/""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MimecastTTP/Data%20Connectors/MimecastTTP_API_FunctionApp.json","true" +"","Minemeld","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Minemeld","azuresentinel","azure-sentinel-solution-minemeld","2022-10-11","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"MDBALogTable_CL","MongoDBAtlas","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MongoDBAtlas","mongodb","azure-sentinel-solution-mongodbatlas","2025-08-22","","","MongoDB","Partner","https://www.mongodb.com/company/contact","","domains","MongoDBAtlasLogsAzureFunctions","MongoDB","MongoDB Atlas Logs","The [MongoDBAtlas](https://www.mongodb.com/products/platform/atlas-database) 
Logs connector gives the capability to upload MongoDB Atlas database logs into Microsoft Sentinel through the MongoDB Atlas Administration API. Refer to the [API documentation](https://www.mongodb.com/docs/api/doc/atlas-admin-api-v2/) for more information. The connector provides the ability to get a range of database log messages for the specified hosts and specified project.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to 'MongoDB Atlas' to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">Ensure the workspace is added to Microsoft Sentinel before deploying the connector.""}, {""title"": ""STEP 1 - Configuration steps for the 'MongoDB Atlas Administration API'"", ""description"": ""1. [Follow these instructions](https://www.mongodb.com/docs/atlas/configure-api-access/#grant-programmatic-access-to-an-organization) to create a MongoDB Atlas service account.\n2. Copy the **Client ID** and **Client Secret** you created, also the **Group ID** (Project) and each **Cluster ID** (Hostname) required for later steps.\n3. Refer [MongoDB Atlas API documentation](https://www.mongodb.com/docs/api/doc/atlas-admin-api-v2/operation/operation-downloadgroupclusterlog) for more details.\n4. The client secret can be passed into the connector via an Azure key vault or directly into the connector.\n5. 
If you want to use the key vault option create a key vault, using a Vault Access Policy, with a secret named **mongodb-client-secret** and your client secret saved as the secret value.""}, {""title"": ""STEP 2 - Deploy the 'MongoDB Atlas Logs' connector and the associated Azure Function"", ""description"": ""\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#view/Microsoft_Azure_CreateUIDef/CustomDeploymentBlade/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FMongoDBAtlas%2FData%20Connectors%2FMongoDBAtlasLogs%2Fazuredeploy_Connector_MongoDBAtlasLogs_AzureFunction.json/uiFormDefinitionUri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FMongoDBAtlas%2FData%20Connectors%2FMongoDBAtlasLogs%2FcreateUiDef.json)""}, {""title"": ""STEP 3 - Set the connector parameters"", ""description"": ""1. Select the preferred **Subscription** and an existing **Resource Group**.\n2. Enter an existing **Log Analytics Workspace Resource ID** belonging to the resource group.\n3. Click **Next**\n4. Enter the **MongoDB Group ID**, a list of up to 10 **MongoDB Cluster IDs**, each on a separate line, and **MongoDB Client ID**.\n5. Choose for **Authentication Method** either **Client Secret** and copy in your client secret value or **Key Vault** and copy in the name of your key vault. \nClick **Next** \n6. Review the MongoDB filters. Select logs from at least one category. Click **Next** \n7. Review the schedule. Click **Next** \n8. 
Review the settings then click **Create**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""MongoDB Atlas service account **Client ID** and **Client Secret** are required. 
[See the documentation to learn more about creating a service account](https://www.mongodb.com/docs/atlas/configure-api-access/#grant-programmatic-access-to-an-organization)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MongoDBAtlas/Data%20Connectors/MongoDBAtlasLogs/MongoDBAtlasLogs_AzureFunction.json","true" +"MongoDBAudit_CL","MongoDBAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MongoDBAudit","azuresentinel","azure-sentinel-solution-mongodbaudit","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MongoDB","MongoDB","[Deprecated] MongoDB Audit","MongoDB data connector provides the capability to ingest [MongoDBAudit](https://www.mongodb.com/) into Microsoft Sentinel. Refer to [MongoDB documentation](https://www.mongodb.com/docs/manual/tutorial/getting-started/) for more information.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias MongoDBAudit and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MongoDBAudit/Parsers/MongoDBAudit.txt) on the second line of the query, enter the hostname(s) of your MongoDBAudit device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Tomcat Server where the logs are generated.\n\n> Logs from MongoDB Enterprise Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure MongoDBAudit to write logs to files"", ""description"": ""Edit mongod.conf file (for Linux) or mongod.cfg (for Windows) to write logs to files:\n\n>**dbPath**: data/db\n\n>**path**: data/db/auditLog.json\n\nSet the following parameters: **dbPath** and **path**. 
Refer to the [MongoDB documentation for more details](https://www.mongodb.com/docs/manual/tutorial/configure-auditing/)""}, {""title"": ""3. Configure the logs to be collected"", ""description"": ""Configure the custom log directory to be collected"", ""instructions"": [{""parameters"": {""linkType"": ""OpenCustomLogsSettings""}, ""type"": ""InstallAgent""}]}, {""title"": """", ""description"": ""1. Select the link above to open your workspace advanced settings \n2. From the left pane, select **Settings**, select **Custom Logs** and click **+Add custom log**\n3. Click **Browse** to upload a sample of a MongoDBAudit log file. Then, click **Next >**\n4. Select **Timestamp** as the record delimiter and click **Next >**\n5. Select **Windows** or **Linux** and enter the path to MongoDBAudit logs based on your configuration \n6. After entering the path, click the '+' symbol to apply, then click **Next >** \n7. Add **MongoDBAudit** as the custom log Name (the '_CL' suffix will be added automatically) and click **Done**.""}, {""title"": ""Validate connectivity"", ""description"": ""It may take upwards of 20 minutes until your logs start to appear in Microsoft Sentinel.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/MongoDBAudit/Data%20Connectors/Connector_MongoDBAudit.json","true" +"MorphisecAlerts_CL","Morphisec","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Morphisec","morphisec","morphisec_utpp_mss","2022-05-05","","","Morphisec","Partner","https://support.morphisec.com/support/home","","domains","MorphisecCCF","Morphisec","Morphisec API Data Connector (via Codeless Connector Framework)","The [Morphisec](https://www.morphisec.com/) solution for Microsoft Sentinel enables you to seamlessly ingest security alerts directly from the Morphisec API. By leveraging Morphisec's proactive breach prevention and moving target defense capabilities, this integration enriches your security operations with high-fidelity, low-noise alerts on evasive threats.
This solution provides more than just data ingestion; it equips your security team with a full suite of ready-to-use content, including: Data Connector, ASIM Parser, Analytic Rule Templates and Workbook.
With this solution, you can empower your SOC to leverage Morphisec's powerful threat prevention within a unified investigation and response workflow in Microsoft Sentinel.","[{""title"": ""Configure Morphisec Connector"", ""description"": ""1. Create an API key client in Morphisec Console with read permissions to fetch alerts. \n2. Provide the Client ID and Client Secret in the connector configuration."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Morphisec Base URL"", ""placeholder"": ""https://.morphisec.cloud"", ""type"": ""text"", ""name"": ""baseUrl""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client ID"", ""placeholder"": ""Enter the Client ID"", ""type"": ""text"", ""name"": ""clientId""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client Secret"", ""placeholder"": ""Enter the Client Secret"", ""type"": ""password"", ""name"": ""secret""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Tenant ID"", ""placeholder"": ""Enter your Morphisec Tenant ID"", ""type"": ""text"", ""name"": ""tenantId""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect to Morphisec"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Morphisec/Data%20Connectors/Morphisec_CCF/Morphisec_ConnectorDefinition.json","true" +"MuleSoft_Cloudhub_CL","Mulesoft","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mulesoft","azuresentinel","azure-sentinel-solution-mulesoft","2022-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","MuleSoft","MuleSoft","MuleSoft Cloudhub","The [MuleSoft 
Cloudhub](https://www.mulesoft.com/platform/saas/cloudhub-ipaas-cloud-based-integration) data connector provides the capability to retrieve logs from Cloudhub applications using the Cloudhub API and more events into Microsoft Sentinel through the REST API. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Azure Blob Storage API to pull logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**MuleSoftCloudhub**](https://aka.ms/sentinel-MuleSoftCloudhub-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**Note: This data connector fetch only the logs of the CloudHub application using Platform API and not of CloudHub 2.0 application**""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the MuleSoft Cloudhub API**\n\n Follow the instructions to obtain the credentials.\n\n1. 
Obtain the **MuleSoftEnvId**, **MuleSoftAppName**, **MuleSoftUsername** and **MuleSoftPassword** using the [documentation](https://help.mulesoft.com/s/article/How-to-get-Cloudhub-application-information-using-Anypoint-Platform-API).\n2. Save credentials for using in the data connector.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the MuleSoft Cloudhub data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""**Option 1 - Azure Resource Manager (ARM) Template**\n\nUse this method for automated deployment of the MuleSoft Cloudhub data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-MuleSoftCloudhubAPI-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **MuleSoftEnvId**, **MuleSoftAppName**, **MuleSoftUsername** and **MuleSoftPassword** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": """", ""description"": ""**Option 2 - Manual Deployment of Azure Functions**\n\n Use the following step-by-step instructions to deploy the MuleSoft Cloudhub data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-MuleSoftCloudhubAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. MuleSoftXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tMuleSoftEnvId\n\t\tMuleSoftAppName\n\t\tMuleSoftUsername\n\t\tMuleSoftPassword\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**MuleSoftEnvId**, **MuleSoftAppName**, **MuleSoftUsername** and **MuleSoftPassword** are required for making API calls.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Mulesoft/Data%20Connectors/MuleSoft_Cloudhub_API_FunctionApp.json","true" +"","Multi Cloud Attack Coverage Essentials - Resource Abuse","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Multi%20Cloud%20Attack%20Coverage%20Essentials%20-%20Resource%20Abuse","azuresentinel","azure-sentinel-solution-multicloudattackcoverage","2023-11-22","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","NCSC-NL NDN Cyber Threat Intelligence Sharing","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NCSC-NL%20NDN%20Cyber%20Threat%20Intelligence%20Sharing","azuresentinel","azure-sentinel-solution-ncscnlndncti","2025-05-19","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"NGINX_CL","NGINX HTTP Server","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NGINX%20HTTP%20Server","azuresentinel","azure-sentinel-solution-nginx","2021-12-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","NGINXHTTPServer","Nginx","[Deprecated] NGINX HTTP Server","The NGINX HTTP Server data connector provides the capability to ingest [NGINX](https://nginx.org/en/) HTTP Server events into Microsoft Sentinel. Refer to [NGINX Logs documentation](https://nginx.org/en/docs/http/ngx_http_log_module.html) for more information.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias NGINXHTTPServer and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NGINX%20HTTP%20Server/Parsers/NGINXHTTPServer.txt).The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the NGINX HTTP Server where the logs are generated.\n\n> Logs from NGINX HTTP Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": 
{""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the custom log directory to be collected"", ""instructions"": [{""parameters"": {""linkType"": ""OpenCustomLogsSettings""}, ""type"": ""InstallAgent""}]}, {""title"": """", ""description"": ""1. Select the link above to open your workspace advanced settings \n2. From the left pane, select **Data**, select **Custom Logs** and click **Add+**\n3. Click **Browse** to upload a sample of a NGINX HTTP Server log file (e.g. access.log or error.log). Then, click **Next >**\n4. Select **New line** as the record delimiter and click **Next >**\n5. Select **Windows** or **Linux** and enter the path to NGINX HTTP logs based on your configuration. Example: \n - **Linux** Directory: '/var/log/nginx/*.log' \n6. After entering the path, click the '+' symbol to apply, then click **Next >** \n7. Add **NGINX_CL** as the custom log Name and click **Done**""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NGINX%20HTTP%20Server/Data%20Connectors/Connector_NGINX_agent.json","true" +"","NISTSP80053","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NISTSP80053","azuresentinel","azure-sentinel-solution-nistsp80053","2022-02-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"BSMmacOS_CL","NXLog BSM macOS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLog%20BSM%20macOS","nxlogltd1589381969261","nxlog_bsm_macos_mss","2022-05-02","","","NXLog","Partner","https://nxlog.co/support-tickets/add/support-ticket","","domains","NXLogBSMmacOS","NXLog","NXLog BSM macOS","The [NXLog BSM](https://docs.nxlog.co/refman/current/im/bsm.html) macOS data connector uses Sun's Basic Security Module (BSM) Auditing API to read events directly from the kernel for capturing audit events on the macOS platform. 
This REST API connector can efficiently export macOS audit events to Microsoft Sentinel in real-time.","[{""title"": """", ""description"": ""Follow the step-by-step instructions in the *NXLog User Guide* Integration Topic [Microsoft Sentinel](https://docs.nxlog.co/userguide/integrate/microsoft-azure-sentinel.html) to configure this connector."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLog%20BSM%20macOS/Data%20Connectors/NXLogBSMmacOS.json","true" +"NXLogFIM_CL","NXLog FIM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLog%20FIM","nxlogltd1589381969261","nxlog_fim","2022-08-15","","","NXLog","Partner","https://nxlog.co/support-tickets/add/support-ticket","","domains","NXLogFIM","NXLog","NXLog FIM","The [NXLog FIM](https://docs.nxlog.co/refman/current/im/fim.html) module allows for the scanning of files and directories, reporting detected additions, changes, renames and deletions on the designated paths through calculated checksums during successive scans. 
This REST API connector can efficiently export the configured FIM events to Microsoft Sentinel in real time.","[{""title"": """", ""description"": ""Follow the step-by-step instructions in the [Microsoft Sentinel](https://docs.nxlog.co/userguide/integrate/microsoft-azure-sentinel.html) integration chapter of the *NXLog User Guide* to configure this connector."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLog%20FIM/Data%20Connectors/NXLogFIM.json","true" +"LinuxAudit_CL","NXLog LinuxAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLog%20LinuxAudit","nxlogltd1589381969261","nxlog_linuxaudit_mss","2022-05-05","","","NXLog","Partner","https://nxlog.co/support-tickets/add/support-ticket","","domains","NXLogLinuxAudit","NXLog","NXLog LinuxAudit","The [NXLog LinuxAudit](https://docs.nxlog.co/refman/current/im/linuxaudit.html) data connector supports custom audit rules and collects logs without auditd or any other user-space software. 
IP addresses and group/user IDs are resolved to their respective names making [Linux audit](https://docs.nxlog.co/userguide/integrate/linux-audit.html) logs more intelligible to security analysts. This REST API connector can efficiently export Linux security events to Microsoft Sentinel in real-time.","[{""title"": """", ""description"": ""Follow the step-by-step instructions in the *NXLog User Guide* Integration Topic [Microsoft Sentinel](https://docs.nxlog.co/userguide/integrate/microsoft-azure-sentinel.html) to configure this connector."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLog%20LinuxAudit/Data%20Connectors/NXLogLinuxAudit.json","true" +"AIX_Audit_CL","NXLogAixAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLogAixAudit","nxlogltd1589381969261","nxlog_aix_audit","2022-05-05","","","NXLog","Partner","https://nxlog.co/support-tickets/add/support-ticket","","domains","NXLogAixAudit","NXLog","NXLog AIX Audit","The [NXLog AIX Audit](https://docs.nxlog.co/refman/current/im/aixaudit.html) data connector uses the AIX Audit subsystem to read events directly from the kernel for capturing audit events on the AIX platform. This REST API connector can efficiently export AIX Audit events to Microsoft Sentinel in real time.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**NXLog_parsed_AIX_Audit_view**](https://aka.ms/sentinel-nxlogaixaudit-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": ""Follow the step-by-step instructions in the *NXLog User Guide* Integration Guide [Microsoft Sentinel](https://docs.nxlog.co/userguide/integrate/microsoft-azure-sentinel.html) to configure this connector."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", 
""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLogAixAudit/Data%20Connectors/NXLogAixAudit.json","true" +"NXLog_DNS_Server_CL","NXLogDnsLogs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLogDnsLogs","nxlogltd1589381969261","nxlog_dns_logs","2022-05-24","","","NXLog","Partner","https://nxlog.co/support-tickets/add/support-ticket","","domains","NXLogDNSLogs","NXLog","NXLog DNS Logs","The NXLog DNS Logs data connector uses Event Tracing for Windows ([ETW](https://docs.microsoft.com/windows/apps/trace-processing/overview)) for collecting both Audit and Analytical DNS Server events. The [NXLog *im_etw* module](https://docs.nxlog.co/refman/current/im/etw.html) reads event tracing data directly for maximum efficiency, without the need to capture the event trace into an .etl file. This REST API connector can forward DNS Server events to Microsoft Sentinel in real time.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on Kusto functions deployed with the Microsoft Sentinel Solution to work as expected. 
The [**ASimDnsMicrosoftNXLog**](https://aka.ms/sentinel-nxlogdnslogs-parser) is designed to leverage Microsoft Sentinel's built-in DNS-related analytics capabilities."", ""instructions"": []}, {""title"": """", ""description"": ""Follow the step-by-step instructions in the *NXLog User Guide* Integration Topic [Microsoft Sentinel](https://docs.nxlog.co/userguide/integrate/microsoft-azure-sentinel.html) to configure this connector."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NXLogDnsLogs/Data%20Connectors/NXLogDnsLogs.json","true" +"Nasuni","Nasuni","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Nasuni","nasunicorporation","nasuni-sentinel","2023-07-07","2023-07-07","","Nasuni","Partner","https://github.com/nasuni-labs/Azure-Sentinel","","domains","NasuniEdgeAppliance","Nasuni","[Deprecated] Nasuni Edge Appliance","The [Nasuni](https://www.nasuni.com/) connector allows you to easily connect your Nasuni Edge Appliance Notifications and file system audit logs with Microsoft Sentinel. This gives you more insight into activity within your Nasuni infrastructure and improves your security operation capabilities.","[{""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Configure the logs to be collected"", ""description"": ""Follow the configuration steps below to configure your Linux machine to send Nasuni event information to Microsoft Sentinel. Refer to the [Azure Monitor Agent documentation](https://learn.microsoft.com/en-us/azure/azure-monitor/agents/agents-overview) for additional details on these steps.\nConfigure the facilities you want to collect and their severities.\n1. Select the link below to open your workspace agents configuration, and select the Syslog tab.\n2. Select Add facility and choose from the drop-down list of facilities. Repeat for all the facilities you want to add.\n3. Mark the check boxes for the desired severities for each facility.\n4. Click Apply.\n"", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Configure Nasuni Edge Appliance settings"", ""description"": ""Follow the instructions in the [Nasuni Management Console Guide](https://view.highspot.com/viewer/629a633ae5b4caaf17018daa?iid=5e6fbfcbc7143309f69fcfcf) to configure Nasuni Edge Appliances to forward syslog events. 
Use the IP address or hostname of the Linux device running the Azure Monitor Agent in the Servers configuration field for the syslog settings.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Nasuni/Data%20Connectors/Nasuni%20Data%20Connector.json","true" +"Syslog","Nasuni","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Nasuni","nasunicorporation","nasuni-sentinel","2023-07-07","2023-07-07","","Nasuni","Partner","https://github.com/nasuni-labs/Azure-Sentinel","","domains","NasuniEdgeAppliance","Nasuni","[Deprecated] Nasuni Edge Appliance","The [Nasuni](https://www.nasuni.com/) connector allows you to easily connect your Nasuni Edge Appliance Notifications and file system audit logs with Microsoft Sentinel. This gives you more insight into activity within your Nasuni infrastructure and improves your security operation capabilities.","[{""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Follow the configuration steps below to configure your Linux machine to send Nasuni event information to Microsoft Sentinel. Refer to the [Azure Monitor Agent documentation](https://learn.microsoft.com/en-us/azure/azure-monitor/agents/agents-overview) for additional details on these steps.\nConfigure the facilities you want to collect and their severities.\n1. Select the link below to open your workspace agents configuration, and select the Syslog tab.\n2. Select Add facility and choose from the drop-down list of facilities. Repeat for all the facilities you want to add.\n3. Mark the check boxes for the desired severities for each facility.\n4. Click Apply.\n"", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. 
Configure Nasuni Edge Appliance settings"", ""description"": ""Follow the instructions in the [Nasuni Management Console Guide](https://view.highspot.com/viewer/629a633ae5b4caaf17018daa?iid=5e6fbfcbc7143309f69fcfcf) to configure Nasuni Edge Appliances to forward syslog events. Use the IP address or hostname of the Linux device running the Azure Monitor Agent in the Servers configuration field for the syslog settings.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Nasuni/Data%20Connectors/Nasuni%20Data%20Connector.json","true" +"Netclean_Incidents_CL","NetClean ProActive","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NetClean%20ProActive","netcleantechnologiesab1651557549734","azure-sentinel-solution-netclean-proactive","2022-06-30","","","NetClean","Partner","https://www.netclean.com/contact","","domains","Netclean_ProActive_Incidents","NetClean Technologies","Netclean ProActive Incidents","This connector uses the Netclean Webhook (required) and Logic Apps to push data into Microsoft Sentinel Log Analytics","[{""title"": """", ""description"": "">**NOTE:** NetClean ProActive uses a Webhook to expose incident data, Azure Logic Apps is used to receive and push data to Log Analytics This might result in additional data ingestion costs.\n It's possible to test this without Logic Apps or NetClean Proactive see option 2"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""1. 
Create a new logic app\n Use When an HTTP request is received as the Trigger and save it. It will now have generated a URL that can be used in the ProActive webconsole configuration.\n Add an action:\n Select the Azure Log Analytics Data Collector and choose Send Data\n Enter Connection Name, Workspace ID and Workspace Key, you will find the information needed in your Log Analytics workspace under Settings-->Agents-->Log Analytics agent instructions.\n In JSON Request body add @triggerBody(). in Custom Log Name add Netclean_Incidents."", ""title"": "" Option 1: Logic app""}, {""description"": ""Ingest data using an API function. Please use the script found on\n https://learn.microsoft.com/en-us/azure/azure-monitor/logs/data-collector-api?tabs=powershell \nReplace the CustomerId and SharedKey values with your values\nReplace the content in $json variable to the sample data found here: https://github.com/Azure/Azure-Sentinel/blob/master/Sample%20Data/Custom/Netclean_Incidents_CL.json .\nSet the LogType variable to **Netclean_Incidents_CL**\nRun the script"", ""title"": "" Option 2 (Testing only)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NetClean%20ProActive/Data%20Connectors/Connector_NetClean.json","true" +"Netskope_CL","Netskope","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskope","netskope","netskope_mss","2022-05-05","","","Netskope","Partner","https://www.netskope.com/services#support","","domains","Netskope","Netskope","Netskope","The [Netskope Cloud Security Platform](https://www.netskope.com/platform) connector provides the capability to ingest Netskope logs and events into Microsoft Sentinel. The connector provides visibility into Netskope Platform Events and Alerts in Microsoft Sentinel to improve monitoring and investigation capabilities.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Netskope to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Netskope and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskope/Parsers/Netskope.txt), on the second line of the query, enter the hostname(s) of your Netskope device(s) and any other unique identifiers for the logstream. 
The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Netskope API**\n\n [Follow these instructions](https://docs.netskope.com/en/rest-api-v1-overview.html) provided by Netskope to obtain an API Token. **Note:** A Netskope account is required""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Netskope connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Netskope API Authorization Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""This method provides an automated deployment of the Netskope connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-netskope-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the **Workspace ID**, **Workspace Key**, **API Key**, and **URI**.\n - Use the following schema for the `uri` value: `https://.goskope.com` Replace `` with your domain.\n - The default **Time Interval** is set to pull the last five (5) minutes of data. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion.\n - The default **Log Types** is set to pull all 6 available log types (`alert, page, application, audit, infrastructure, network`), remove any are not required. \n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n6. After successfully deploying the connector, download the Kusto Function to normalize the data fields. [Follow the steps](https://aka.ms/sentinelgithubparsersnetskope) to use the Kusto function alias, **Netskope**.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""This method provides the step-by-step instructions to deploy the Netskope connector manually with Azure Function.""}, {""title"": """", ""description"": ""**1. Create a Function App**\n\n1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp), and select **+ Add**.\n2. In the **Basics** tab, ensure Runtime stack is set to **Powershell Core**. \n3. In the **Hosting** tab, ensure the **Consumption (Serverless)** plan type is selected.\n4. 
Make other preferable configuration changes, if needed, then click **Create**.""}, {""title"": """", ""description"": ""**2. Import Function App Code**\n\n1. In the newly created Function App, select **Functions** on the left pane and click **+ Add**.\n2. Select **Timer Trigger**.\n3. Enter a unique Function **Name** and modify the cron schedule, if needed. The default value is set to run the Function App every 5 minutes. (Note: the Timer trigger should match the `timeInterval` value below to prevent overlapping data), click **Create**.\n4. Click on **Code + Test** on the left pane. \n5. Copy the [Function App Code](https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Solutions/Netskope/Data%20Connectors/Netskope/AzureFunctionNetskope/run.ps1) and paste into the Function App `run.ps1` editor.\n6. Click **Save**.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following seven (7) application settings individually, with their respective string values (case-sensitive): \n\t\tapikey\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\ttimeInterval\n\t\tlogTypes\n\t\tlogAnalyticsUri (optional)\n> - Enter the URI that corresponds to your region. The `uri` value must follow the following schema: `https://.goskope.com` - There is no need to add subsequent parameters to the Uri, the Function App will dynamically append the parameters in the proper format.\n> - Set the `timeInterval` (in minutes) to the default value of `5` to correspond to the default Timer Trigger of every `5` minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly to prevent overlapping data ingestion. 
\n> - Set the `logTypes` to `alert, page, application, audit, infrastructure, network` - This list represents all the available log types. Select the log types based on logging requirements, separating each by a single comma.\n> - Note: If using Azure Key Vault, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.\n5. After successfully deploying the connector, download the Kusto Function to normalize the data fields. [Follow the steps](https://aka.ms/sentinelgithubparsersnetskope) to use the Kusto function alias, **Netskope**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Netskope API Token"", ""description"": ""A Netskope API Token is required. [See the documentation to learn more about Netskope API](https://innovatechcloud.goskope.com/docs/Netskope_Help/en/rest-api-v1-overview.html). **Note:** A Netskope account is required""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskope/Data%20Connectors/Netskope/Netskope_API_FunctionApp.json","true" +"NetskopeAlerts_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeAlertsEvents","Netskope","Netskope Alerts and Events","Netskope Security Alerts and Events","[{""title"": ""STEP 1 - Create a Netskope API key."", ""description"": ""Follow the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/) for guidance on this step.""}, {""title"": ""STEP 2 - Enter your Netskope product Details"", ""description"": ""Enter your Netskope organisation url & API Token below:"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Organisation Url"", ""placeholder"": ""Enter your organisation url"", ""type"": ""text"", ""name"": ""OrganisationURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""Enter your API Key"", ""type"": ""password"", ""name"": ""apikey""}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Remediation"", ""type"": ""text"", ""name"": ""NetskopeAlertsRemediationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Remediation data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Uba"", ""type"": 
""text"", ""name"": ""NetskopeAlertsUbaingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Uba data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Security Assessment"", ""type"": ""text"", ""name"": ""NetskopeAlertsSecurityAssessmentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Security Assessment data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Quarantine"", ""type"": ""text"", ""name"": ""NetskopeAlertsQuarantineingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Quarantine data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Policy"", ""type"": ""text"", ""name"": ""NetskopeAlertsPolicyingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Policy data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malware"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalwareingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malware data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malsite"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalsiteingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malsite data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": 
{""label"": ""Netskope Alerts DLP"", ""type"": ""text"", ""name"": ""NetskopeAlertsDlpingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts CTEP"", ""type"": ""text"", ""name"": ""NetskopeAlertsCtepingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts CTEP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Watchlist"", ""type"": ""text"", ""name"": ""NetskopeAlertsWatchlistingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Watchlist data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Compromised Credentials"", ""type"": ""text"", ""name"": ""NetskopeAlertsCompromisedCredentialsingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Compromised Credentials data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Content"", ""type"": ""text"", ""name"": ""NetskopeAlertsContentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Content data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Device"", ""type"": ""text"", ""name"": ""NetskopeAlertsDeviceingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Device data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, 
{""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Application"", ""type"": ""text"", ""name"": ""NetskopeEventsApplicationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Application data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Audit"", ""type"": ""text"", ""name"": ""NetskopeEventsAuditioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Audit data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Connection"", ""type"": ""text"", ""name"": ""NetskopeEventsConnectioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Connection data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events DLP"", ""type"": ""text"", ""name"": ""NetskopeEventsDLPingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Endpoint"", ""type"": ""text"", ""name"": ""NetskopeEventsEndpointingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Endpoint data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Infrastructure"", ""type"": ""text"", ""name"": ""NetskopeEventsInfrastructureingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Infrastructure data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, 
{""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Network"", ""type"": ""text"", ""name"": ""NetskopeEventsNetworkingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Network data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Page"", ""type"": ""text"", ""name"": ""NetskopeEventsPageingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Page data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""InstructionStepsGroup"", ""parameters"": {""instructionSteps"": [{""title"": ""OPTIONAL: Specify the Index the API uses."", ""description"": ""**Configuring the index is optional and only required in advanced scenario's.** \n Netskope uses an [index](https://docs.netskope.com/en/using-the-rest-api-v2-dataexport-iterator-endpoints/#how-do-iterator-endpoints-function) to retrieve events. In some advanced cases (consuming the event in multiple Microsoft Sentinel workspaces, or pre-fatiguing the index to only retrieve recent data), a customer might want to have direct control over the index."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Index"", ""placeholder"": ""NetskopeCCP"", ""type"": ""text"", ""name"": ""Index""}}]}]}}]}, {""title"": ""STEP 3 - Click Connect"", ""description"": ""Verify all fields above were filled in correctly. 
Press the Connect to connect Netskope to Microsoft Sentinel."", ""instructions"": [{""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Netskope organisation url"", ""description"": ""The Netskope data connector requires you to provide your organisation url. You can find your organisation url by signing into the Netskope portal.""}, {""name"": ""Netskope API key"", ""description"": ""The Netskope data connector requires you to provide a valid API key. You can create one by following the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeAlertsEvents_RestAPI_CCP/NetskopeAlertsEvents_ConnectorDefination.json","true" +"NetskopeEventsApplication_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeAlertsEvents","Netskope","Netskope Alerts and Events","Netskope Security Alerts and Events","[{""title"": ""STEP 1 - Create a Netskope API key."", ""description"": ""Follow the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/) for guidance on this step.""}, {""title"": ""STEP 2 - Enter your Netskope product Details"", ""description"": ""Enter your Netskope organisation url & API Token below:"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Organisation Url"", ""placeholder"": ""Enter your organisation url"", 
""type"": ""text"", ""name"": ""OrganisationURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""Enter your API Key"", ""type"": ""password"", ""name"": ""apikey""}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Remediation"", ""type"": ""text"", ""name"": ""NetskopeAlertsRemediationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Remediation data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Uba"", ""type"": ""text"", ""name"": ""NetskopeAlertsUbaingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Uba data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Security Assessment"", ""type"": ""text"", ""name"": ""NetskopeAlertsSecurityAssessmentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Security Assessment data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Quarantine"", ""type"": ""text"", ""name"": ""NetskopeAlertsQuarantineingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Quarantine data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Policy"", ""type"": ""text"", ""name"": ""NetskopeAlertsPolicyingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Policy data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts 
Malware"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalwareingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malware data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malsite"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalsiteingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malsite data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts DLP"", ""type"": ""text"", ""name"": ""NetskopeAlertsDlpingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts CTEP"", ""type"": ""text"", ""name"": ""NetskopeAlertsCtepingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts CTEP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Watchlist"", ""type"": ""text"", ""name"": ""NetskopeAlertsWatchlistingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Watchlist data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Compromised Credentials"", ""type"": ""text"", ""name"": ""NetskopeAlertsCompromisedCredentialsingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Compromised Credentials data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", 
""parameters"": {""label"": ""Netskope Alerts Content"", ""type"": ""text"", ""name"": ""NetskopeAlertsContentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Content data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Device"", ""type"": ""text"", ""name"": ""NetskopeAlertsDeviceingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Device data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Application"", ""type"": ""text"", ""name"": ""NetskopeEventsApplicationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Application data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Audit"", ""type"": ""text"", ""name"": ""NetskopeEventsAuditioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Audit data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Connection"", ""type"": ""text"", ""name"": ""NetskopeEventsConnectioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Connection data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events DLP"", ""type"": ""text"", ""name"": ""NetskopeEventsDLPingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": 
""Dropdown"", ""parameters"": {""label"": ""Netskope Events Endpoint"", ""type"": ""text"", ""name"": ""NetskopeEventsEndpointingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Endpoint data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Infrastructure"", ""type"": ""text"", ""name"": ""NetskopeEventsInfrastructureingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Infrastructure data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Network"", ""type"": ""text"", ""name"": ""NetskopeEventsNetworkingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Network data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Page"", ""type"": ""text"", ""name"": ""NetskopeEventsPageingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Page data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""InstructionStepsGroup"", ""parameters"": {""instructionSteps"": [{""title"": ""OPTIONAL: Specify the Index the API uses."", ""description"": ""**Configuring the index is optional and only required in advanced scenario's.** \n Netskope uses an [index](https://docs.netskope.com/en/using-the-rest-api-v2-dataexport-iterator-endpoints/#how-do-iterator-endpoints-function) to retrieve events. 
In some advanced cases (consuming the event in multiple Microsoft Sentinel workspaces, or pre-fatiguing the index to only retrieve recent data), a customer might want to have direct control over the index."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Index"", ""placeholder"": ""NetskopeCCP"", ""type"": ""text"", ""name"": ""Index""}}]}]}}]}, {""title"": ""STEP 3 - Click Connect"", ""description"": ""Verify all fields above were filled in correctly. Press the Connect to connect Netskope to Microsoft Sentinel."", ""instructions"": [{""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Netskope organisation url"", ""description"": ""The Netskope data connector requires you to provide your organisation url. You can find your organisation url by signing into the Netskope portal.""}, {""name"": ""Netskope API key"", ""description"": ""The Netskope data connector requires you to provide a valid API key. 
You can create one by following the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeAlertsEvents_RestAPI_CCP/NetskopeAlertsEvents_ConnectorDefination.json","true" +"NetskopeEventsAudit_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeAlertsEvents","Netskope","Netskope Alerts and Events","Netskope Security Alerts and Events","[{""title"": ""STEP 1 - Create a Netskope API key."", ""description"": ""Follow the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/) for guidance on this step.""}, {""title"": ""STEP 2 - Enter your Netskope product Details"", ""description"": ""Enter your Netskope organisation url & API Token below:"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Organisation Url"", ""placeholder"": ""Enter your organisation url"", ""type"": ""text"", ""name"": ""OrganisationURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""Enter your API Key"", ""type"": ""password"", ""name"": ""apikey""}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Remediation"", ""type"": ""text"", ""name"": ""NetskopeAlertsRemediationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Remediation data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Uba"", ""type"": ""text"", ""name"": ""NetskopeAlertsUbaingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Uba data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", 
""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Security Assessment"", ""type"": ""text"", ""name"": ""NetskopeAlertsSecurityAssessmentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Security Assessment data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Quarantine"", ""type"": ""text"", ""name"": ""NetskopeAlertsQuarantineingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Quarantine data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Policy"", ""type"": ""text"", ""name"": ""NetskopeAlertsPolicyingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Policy data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malware"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalwareingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malware data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malsite"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalsiteingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malsite data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts DLP"", ""type"": ""text"", ""name"": ""NetskopeAlertsDlpingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts DLP data?"", ""options"": [{""key"": 
""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts CTEP"", ""type"": ""text"", ""name"": ""NetskopeAlertsCtepingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts CTEP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Watchlist"", ""type"": ""text"", ""name"": ""NetskopeAlertsWatchlistingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Watchlist data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Compromised Credentials"", ""type"": ""text"", ""name"": ""NetskopeAlertsCompromisedCredentialsingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Compromised Credentials data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Content"", ""type"": ""text"", ""name"": ""NetskopeAlertsContentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Content data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Device"", ""type"": ""text"", ""name"": ""NetskopeAlertsDeviceingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Device data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Application"", ""type"": ""text"", ""name"": ""NetskopeEventsApplicationingestion"", ""required"": true, ""placeholder"": ""Do you want to 
ingest Netskope Events Application data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Audit"", ""type"": ""text"", ""name"": ""NetskopeEventsAuditioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Audit data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Connection"", ""type"": ""text"", ""name"": ""NetskopeEventsConnectioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Connection data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events DLP"", ""type"": ""text"", ""name"": ""NetskopeEventsDLPingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Endpoint"", ""type"": ""text"", ""name"": ""NetskopeEventsEndpointingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Endpoint data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Infrastructure"", ""type"": ""text"", ""name"": ""NetskopeEventsInfrastructureingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Infrastructure data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Network"", ""type"": ""text"", ""name"": ""NetskopeEventsNetworkingestion"", ""required"": true, 
""placeholder"": ""Do you want to ingest Netskope Events Network data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Page"", ""type"": ""text"", ""name"": ""NetskopeEventsPageingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Page data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""InstructionStepsGroup"", ""parameters"": {""instructionSteps"": [{""title"": ""OPTIONAL: Specify the Index the API uses."", ""description"": ""**Configuring the index is optional and only required in advanced scenario's.** \n Netskope uses an [index](https://docs.netskope.com/en/using-the-rest-api-v2-dataexport-iterator-endpoints/#how-do-iterator-endpoints-function) to retrieve events. In some advanced cases (consuming the event in multiple Microsoft Sentinel workspaces, or pre-fatiguing the index to only retrieve recent data), a customer might want to have direct control over the index."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Index"", ""placeholder"": ""NetskopeCCP"", ""type"": ""text"", ""name"": ""Index""}}]}]}}]}, {""title"": ""STEP 3 - Click Connect"", ""description"": ""Verify all fields above were filled in correctly. 
Press the Connect to connect Netskope to Microsoft Sentinel."", ""instructions"": [{""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Netskope organisation url"", ""description"": ""The Netskope data connector requires you to provide your organisation url. You can find your organisation url by signing into the Netskope portal.""}, {""name"": ""Netskope API key"", ""description"": ""The Netskope data connector requires you to provide a valid API key. You can create one by following the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeAlertsEvents_RestAPI_CCP/NetskopeAlertsEvents_ConnectorDefination.json","true" +"NetskopeEventsConnection_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeAlertsEvents","Netskope","Netskope Alerts and Events","Netskope Security Alerts and Events","[{""title"": ""STEP 1 - Create a Netskope API key."", ""description"": ""Follow the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/) for guidance on this step.""}, {""title"": ""STEP 2 - Enter your Netskope product Details"", ""description"": ""Enter your Netskope organisation url & API Token below:"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Organisation Url"", ""placeholder"": ""Enter your organisation url"", 
""type"": ""text"", ""name"": ""OrganisationURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""Enter your API Key"", ""type"": ""password"", ""name"": ""apikey""}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Remediation"", ""type"": ""text"", ""name"": ""NetskopeAlertsRemediationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Remediation data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Uba"", ""type"": ""text"", ""name"": ""NetskopeAlertsUbaingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Uba data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Security Assessment"", ""type"": ""text"", ""name"": ""NetskopeAlertsSecurityAssessmentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Security Assessment data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Quarantine"", ""type"": ""text"", ""name"": ""NetskopeAlertsQuarantineingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Quarantine data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Policy"", ""type"": ""text"", ""name"": ""NetskopeAlertsPolicyingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Policy data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts 
Malware"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalwareingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malware data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malsite"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalsiteingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malsite data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts DLP"", ""type"": ""text"", ""name"": ""NetskopeAlertsDlpingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts CTEP"", ""type"": ""text"", ""name"": ""NetskopeAlertsCtepingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts CTEP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Watchlist"", ""type"": ""text"", ""name"": ""NetskopeAlertsWatchlistingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Watchlist data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Compromised Credentials"", ""type"": ""text"", ""name"": ""NetskopeAlertsCompromisedCredentialsingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Compromised Credentials data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", 
""parameters"": {""label"": ""Netskope Alerts Content"", ""type"": ""text"", ""name"": ""NetskopeAlertsContentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Content data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Device"", ""type"": ""text"", ""name"": ""NetskopeAlertsDeviceingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Device data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Application"", ""type"": ""text"", ""name"": ""NetskopeEventsApplicationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Application data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Audit"", ""type"": ""text"", ""name"": ""NetskopeEventsAuditioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Audit data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Connection"", ""type"": ""text"", ""name"": ""NetskopeEventsConnectioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Connection data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events DLP"", ""type"": ""text"", ""name"": ""NetskopeEventsDLPingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": 
""Dropdown"", ""parameters"": {""label"": ""Netskope Events Endpoint"", ""type"": ""text"", ""name"": ""NetskopeEventsEndpointingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Endpoint data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Infrastructure"", ""type"": ""text"", ""name"": ""NetskopeEventsInfrastructureingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Infrastructure data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Network"", ""type"": ""text"", ""name"": ""NetskopeEventsNetworkingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Network data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Page"", ""type"": ""text"", ""name"": ""NetskopeEventsPageingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Page data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""InstructionStepsGroup"", ""parameters"": {""instructionSteps"": [{""title"": ""OPTIONAL: Specify the Index the API uses."", ""description"": ""**Configuring the index is optional and only required in advanced scenario's.** \n Netskope uses an [index](https://docs.netskope.com/en/using-the-rest-api-v2-dataexport-iterator-endpoints/#how-do-iterator-endpoints-function) to retrieve events. 
In some advanced cases (consuming the event in multiple Microsoft Sentinel workspaces, or pre-fatiguing the index to only retrieve recent data), a customer might want to have direct control over the index."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Index"", ""placeholder"": ""NetskopeCCP"", ""type"": ""text"", ""name"": ""Index""}}]}]}}]}, {""title"": ""STEP 3 - Click Connect"", ""description"": ""Verify all fields above were filled in correctly. Press the Connect to connect Netskope to Microsoft Sentinel."", ""instructions"": [{""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Netskope organisation url"", ""description"": ""The Netskope data connector requires you to provide your organisation url. You can find your organisation url by signing into the Netskope portal.""}, {""name"": ""Netskope API key"", ""description"": ""The Netskope data connector requires you to provide a valid API key. 
You can create one by following the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeAlertsEvents_RestAPI_CCP/NetskopeAlertsEvents_ConnectorDefination.json","true" +"NetskopeEventsDLP_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeAlertsEvents","Netskope","Netskope Alerts and Events","Netskope Security Alerts and Events","[{""title"": ""STEP 1 - Create a Netskope API key."", ""description"": ""Follow the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/) for guidance on this step.""}, {""title"": ""STEP 2 - Enter your Netskope product Details"", ""description"": ""Enter your Netskope organisation url & API Token below:"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Organisation Url"", ""placeholder"": ""Enter your organisation url"", ""type"": ""text"", ""name"": ""OrganisationURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""Enter your API Key"", ""type"": ""password"", ""name"": ""apikey""}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Remediation"", ""type"": ""text"", ""name"": ""NetskopeAlertsRemediationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Remediation data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Uba"", ""type"": ""text"", ""name"": ""NetskopeAlertsUbaingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Uba data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": 
""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Security Assessment"", ""type"": ""text"", ""name"": ""NetskopeAlertsSecurityAssessmentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Security Assessment data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Quarantine"", ""type"": ""text"", ""name"": ""NetskopeAlertsQuarantineingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Quarantine data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Policy"", ""type"": ""text"", ""name"": ""NetskopeAlertsPolicyingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Policy data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malware"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalwareingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malware data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malsite"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalsiteingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malsite data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts DLP"", ""type"": ""text"", ""name"": ""NetskopeAlertsDlpingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts DLP data?"", ""options"": [{""key"": ""Yes"", 
""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts CTEP"", ""type"": ""text"", ""name"": ""NetskopeAlertsCtepingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts CTEP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Watchlist"", ""type"": ""text"", ""name"": ""NetskopeAlertsWatchlistingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Watchlist data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Compromised Credentials"", ""type"": ""text"", ""name"": ""NetskopeAlertsCompromisedCredentialsingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Compromised Credentials data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Content"", ""type"": ""text"", ""name"": ""NetskopeAlertsContentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Content data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Device"", ""type"": ""text"", ""name"": ""NetskopeAlertsDeviceingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Device data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Application"", ""type"": ""text"", ""name"": ""NetskopeEventsApplicationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest 
Netskope Events Application data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Audit"", ""type"": ""text"", ""name"": ""NetskopeEventsAuditioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Audit data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Connection"", ""type"": ""text"", ""name"": ""NetskopeEventsConnectioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Connection data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events DLP"", ""type"": ""text"", ""name"": ""NetskopeEventsDLPingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Endpoint"", ""type"": ""text"", ""name"": ""NetskopeEventsEndpointingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Endpoint data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Infrastructure"", ""type"": ""text"", ""name"": ""NetskopeEventsInfrastructureingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Infrastructure data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Network"", ""type"": ""text"", ""name"": ""NetskopeEventsNetworkingestion"", ""required"": true, 
""placeholder"": ""Do you want to ingest Netskope Events Network data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Page"", ""type"": ""text"", ""name"": ""NetskopeEventsPageingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Page data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""InstructionStepsGroup"", ""parameters"": {""instructionSteps"": [{""title"": ""OPTIONAL: Specify the Index the API uses."", ""description"": ""**Configuring the index is optional and only required in advanced scenario's.** \n Netskope uses an [index](https://docs.netskope.com/en/using-the-rest-api-v2-dataexport-iterator-endpoints/#how-do-iterator-endpoints-function) to retrieve events. In some advanced cases (consuming the event in multiple Microsoft Sentinel workspaces, or pre-fatiguing the index to only retrieve recent data), a customer might want to have direct control over the index."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Index"", ""placeholder"": ""NetskopeCCP"", ""type"": ""text"", ""name"": ""Index""}}]}]}}]}, {""title"": ""STEP 3 - Click Connect"", ""description"": ""Verify all fields above were filled in correctly. 
Press the Connect to connect Netskope to Microsoft Sentinel."", ""instructions"": [{""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Netskope organisation url"", ""description"": ""The Netskope data connector requires you to provide your organisation url. You can find your organisation url by signing into the Netskope portal.""}, {""name"": ""Netskope API key"", ""description"": ""The Netskope data connector requires you to provide a valid API key. You can create one by following the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeAlertsEvents_RestAPI_CCP/NetskopeAlertsEvents_ConnectorDefination.json","true" +"NetskopeEventsEndpoint_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeAlertsEvents","Netskope","Netskope Alerts and Events","Netskope Security Alerts and Events","[{""title"": ""STEP 1 - Create a Netskope API key."", ""description"": ""Follow the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/) for guidance on this step.""}, {""title"": ""STEP 2 - Enter your Netskope product Details"", ""description"": ""Enter your Netskope organisation url & API Token below:"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Organisation Url"", ""placeholder"": ""Enter your organisation url"", 
""type"": ""text"", ""name"": ""OrganisationURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""Enter your API Key"", ""type"": ""password"", ""name"": ""apikey""}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Remediation"", ""type"": ""text"", ""name"": ""NetskopeAlertsRemediationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Remediation data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Uba"", ""type"": ""text"", ""name"": ""NetskopeAlertsUbaingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Uba data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Security Assessment"", ""type"": ""text"", ""name"": ""NetskopeAlertsSecurityAssessmentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Security Assessment data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Quarantine"", ""type"": ""text"", ""name"": ""NetskopeAlertsQuarantineingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Quarantine data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Policy"", ""type"": ""text"", ""name"": ""NetskopeAlertsPolicyingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Policy data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts 
Malware"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalwareingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malware data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malsite"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalsiteingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malsite data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts DLP"", ""type"": ""text"", ""name"": ""NetskopeAlertsDlpingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts CTEP"", ""type"": ""text"", ""name"": ""NetskopeAlertsCtepingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts CTEP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Watchlist"", ""type"": ""text"", ""name"": ""NetskopeAlertsWatchlistingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Watchlist data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Compromised Credentials"", ""type"": ""text"", ""name"": ""NetskopeAlertsCompromisedCredentialsingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Compromised Credentials data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", 
""parameters"": {""label"": ""Netskope Alerts Content"", ""type"": ""text"", ""name"": ""NetskopeAlertsContentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Content data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Device"", ""type"": ""text"", ""name"": ""NetskopeAlertsDeviceingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Device data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Application"", ""type"": ""text"", ""name"": ""NetskopeEventsApplicationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Application data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Audit"", ""type"": ""text"", ""name"": ""NetskopeEventsAuditioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Audit data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Connection"", ""type"": ""text"", ""name"": ""NetskopeEventsConnectioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Connection data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events DLP"", ""type"": ""text"", ""name"": ""NetskopeEventsDLPingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": 
""Dropdown"", ""parameters"": {""label"": ""Netskope Events Endpoint"", ""type"": ""text"", ""name"": ""NetskopeEventsEndpointingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Endpoint data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Infrastructure"", ""type"": ""text"", ""name"": ""NetskopeEventsInfrastructureingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Infrastructure data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Network"", ""type"": ""text"", ""name"": ""NetskopeEventsNetworkingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Network data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Page"", ""type"": ""text"", ""name"": ""NetskopeEventsPageingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Page data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""InstructionStepsGroup"", ""parameters"": {""instructionSteps"": [{""title"": ""OPTIONAL: Specify the Index the API uses."", ""description"": ""**Configuring the index is optional and only required in advanced scenario's.** \n Netskope uses an [index](https://docs.netskope.com/en/using-the-rest-api-v2-dataexport-iterator-endpoints/#how-do-iterator-endpoints-function) to retrieve events. 
In some advanced cases (consuming the event in multiple Microsoft Sentinel workspaces, or pre-fatiguing the index to only retrieve recent data), a customer might want to have direct control over the index."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Index"", ""placeholder"": ""NetskopeCCP"", ""type"": ""text"", ""name"": ""Index""}}]}]}}]}, {""title"": ""STEP 3 - Click Connect"", ""description"": ""Verify all fields above were filled in correctly. Press the Connect to connect Netskope to Microsoft Sentinel."", ""instructions"": [{""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Netskope organisation url"", ""description"": ""The Netskope data connector requires you to provide your organisation url. You can find your organisation url by signing into the Netskope portal.""}, {""name"": ""Netskope API key"", ""description"": ""The Netskope data connector requires you to provide a valid API key. 
You can create one by following the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeAlertsEvents_RestAPI_CCP/NetskopeAlertsEvents_ConnectorDefination.json","true" +"NetskopeEventsInfrastructure_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeAlertsEvents","Netskope","Netskope Alerts and Events","Netskope Security Alerts and Events","[{""title"": ""STEP 1 - Create a Netskope API key."", ""description"": ""Follow the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/) for guidance on this step.""}, {""title"": ""STEP 2 - Enter your Netskope product Details"", ""description"": ""Enter your Netskope organisation url & API Token below:"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Organisation Url"", ""placeholder"": ""Enter your organisation url"", ""type"": ""text"", ""name"": ""OrganisationURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""Enter your API Key"", ""type"": ""password"", ""name"": ""apikey""}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Remediation"", ""type"": ""text"", ""name"": ""NetskopeAlertsRemediationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Remediation data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Uba"", ""type"": ""text"", ""name"": ""NetskopeAlertsUbaingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Uba data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": 
""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Security Assessment"", ""type"": ""text"", ""name"": ""NetskopeAlertsSecurityAssessmentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Security Assessment data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Quarantine"", ""type"": ""text"", ""name"": ""NetskopeAlertsQuarantineingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Quarantine data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Policy"", ""type"": ""text"", ""name"": ""NetskopeAlertsPolicyingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Policy data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malware"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalwareingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malware data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malsite"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalsiteingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malsite data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts DLP"", ""type"": ""text"", ""name"": ""NetskopeAlertsDlpingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts DLP data?"", ""options"": [{""key"": 
""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts CTEP"", ""type"": ""text"", ""name"": ""NetskopeAlertsCtepingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts CTEP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Watchlist"", ""type"": ""text"", ""name"": ""NetskopeAlertsWatchlistingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Watchlist data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Compromised Credentials"", ""type"": ""text"", ""name"": ""NetskopeAlertsCompromisedCredentialsingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Compromised Credentials data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Content"", ""type"": ""text"", ""name"": ""NetskopeAlertsContentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Content data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Device"", ""type"": ""text"", ""name"": ""NetskopeAlertsDeviceingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Device data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Application"", ""type"": ""text"", ""name"": ""NetskopeEventsApplicationingestion"", ""required"": true, ""placeholder"": ""Do you want to 
ingest Netskope Events Application data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Audit"", ""type"": ""text"", ""name"": ""NetskopeEventsAuditioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Audit data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Connection"", ""type"": ""text"", ""name"": ""NetskopeEventsConnectioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Connection data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events DLP"", ""type"": ""text"", ""name"": ""NetskopeEventsDLPingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Endpoint"", ""type"": ""text"", ""name"": ""NetskopeEventsEndpointingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Endpoint data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Infrastructure"", ""type"": ""text"", ""name"": ""NetskopeEventsInfrastructureingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Infrastructure data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Network"", ""type"": ""text"", ""name"": ""NetskopeEventsNetworkingestion"", ""required"": true, 
""placeholder"": ""Do you want to ingest Netskope Events Network data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Page"", ""type"": ""text"", ""name"": ""NetskopeEventsPageingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Page data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""InstructionStepsGroup"", ""parameters"": {""instructionSteps"": [{""title"": ""OPTIONAL: Specify the Index the API uses."", ""description"": ""**Configuring the index is optional and only required in advanced scenario's.** \n Netskope uses an [index](https://docs.netskope.com/en/using-the-rest-api-v2-dataexport-iterator-endpoints/#how-do-iterator-endpoints-function) to retrieve events. In some advanced cases (consuming the event in multiple Microsoft Sentinel workspaces, or pre-fatiguing the index to only retrieve recent data), a customer might want to have direct control over the index."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Index"", ""placeholder"": ""NetskopeCCP"", ""type"": ""text"", ""name"": ""Index""}}]}]}}]}, {""title"": ""STEP 3 - Click Connect"", ""description"": ""Verify all fields above were filled in correctly. 
Press the Connect to connect Netskope to Microsoft Sentinel."", ""instructions"": [{""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Netskope organisation url"", ""description"": ""The Netskope data connector requires you to provide your organisation url. You can find your organisation url by signing into the Netskope portal.""}, {""name"": ""Netskope API key"", ""description"": ""The Netskope data connector requires you to provide a valid API key. You can create one by following the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeAlertsEvents_RestAPI_CCP/NetskopeAlertsEvents_ConnectorDefination.json","true" +"NetskopeEventsNetwork_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeAlertsEvents","Netskope","Netskope Alerts and Events","Netskope Security Alerts and Events","[{""title"": ""STEP 1 - Create a Netskope API key."", ""description"": ""Follow the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/) for guidance on this step.""}, {""title"": ""STEP 2 - Enter your Netskope product Details"", ""description"": ""Enter your Netskope organisation url & API Token below:"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Organisation Url"", ""placeholder"": ""Enter your organisation url"", ""type"": 
""text"", ""name"": ""OrganisationURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""Enter your API Key"", ""type"": ""password"", ""name"": ""apikey""}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Remediation"", ""type"": ""text"", ""name"": ""NetskopeAlertsRemediationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Remediation data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Uba"", ""type"": ""text"", ""name"": ""NetskopeAlertsUbaingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Uba data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Security Assessment"", ""type"": ""text"", ""name"": ""NetskopeAlertsSecurityAssessmentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Security Assessment data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Quarantine"", ""type"": ""text"", ""name"": ""NetskopeAlertsQuarantineingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Quarantine data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Policy"", ""type"": ""text"", ""name"": ""NetskopeAlertsPolicyingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Policy data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malware"", 
""type"": ""text"", ""name"": ""NetskopeAlertsMalwareingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malware data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malsite"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalsiteingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malsite data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts DLP"", ""type"": ""text"", ""name"": ""NetskopeAlertsDlpingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts CTEP"", ""type"": ""text"", ""name"": ""NetskopeAlertsCtepingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts CTEP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Watchlist"", ""type"": ""text"", ""name"": ""NetskopeAlertsWatchlistingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Watchlist data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Compromised Credentials"", ""type"": ""text"", ""name"": ""NetskopeAlertsCompromisedCredentialsingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Compromised Credentials data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", 
""parameters"": {""label"": ""Netskope Alerts Content"", ""type"": ""text"", ""name"": ""NetskopeAlertsContentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Content data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Device"", ""type"": ""text"", ""name"": ""NetskopeAlertsDeviceingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Device data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Application"", ""type"": ""text"", ""name"": ""NetskopeEventsApplicationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Application data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Audit"", ""type"": ""text"", ""name"": ""NetskopeEventsAuditioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Audit data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Connection"", ""type"": ""text"", ""name"": ""NetskopeEventsConnectioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Connection data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events DLP"", ""type"": ""text"", ""name"": ""NetskopeEventsDLPingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": 
""Dropdown"", ""parameters"": {""label"": ""Netskope Events Endpoint"", ""type"": ""text"", ""name"": ""NetskopeEventsEndpointingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Endpoint data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Infrastructure"", ""type"": ""text"", ""name"": ""NetskopeEventsInfrastructureingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Infrastructure data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Network"", ""type"": ""text"", ""name"": ""NetskopeEventsNetworkingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Network data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Page"", ""type"": ""text"", ""name"": ""NetskopeEventsPageingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Page data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""InstructionStepsGroup"", ""parameters"": {""instructionSteps"": [{""title"": ""OPTIONAL: Specify the Index the API uses."", ""description"": ""**Configuring the index is optional and only required in advanced scenario's.** \n Netskope uses an [index](https://docs.netskope.com/en/using-the-rest-api-v2-dataexport-iterator-endpoints/#how-do-iterator-endpoints-function) to retrieve events. 
In some advanced cases (consuming the event in multiple Microsoft Sentinel workspaces, or pre-fatiguing the index to only retrieve recent data), a customer might want to have direct control over the index."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Index"", ""placeholder"": ""NetskopeCCP"", ""type"": ""text"", ""name"": ""Index""}}]}]}}]}, {""title"": ""STEP 3 - Click Connect"", ""description"": ""Verify all fields above were filled in correctly. Press the Connect to connect Netskope to Microsoft Sentinel."", ""instructions"": [{""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Netskope organisation url"", ""description"": ""The Netskope data connector requires you to provide your organisation url. You can find your organisation url by signing into the Netskope portal.""}, {""name"": ""Netskope API key"", ""description"": ""The Netskope data connector requires you to provide a valid API key. 
You can create one by following the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeAlertsEvents_RestAPI_CCP/NetskopeAlertsEvents_ConnectorDefination.json","true" +"NetskopeEventsPage_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeAlertsEvents","Netskope","Netskope Alerts and Events","Netskope Security Alerts and Events","[{""title"": ""STEP 1 - Create a Netskope API key."", ""description"": ""Follow the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/) for guidance on this step.""}, {""title"": ""STEP 2 - Enter your Netskope product Details"", ""description"": ""Enter your Netskope organisation url & API Token below:"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Organisation Url"", ""placeholder"": ""Enter your organisation url"", ""type"": ""text"", ""name"": ""OrganisationURL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""Enter your API Key"", ""type"": ""password"", ""name"": ""apikey""}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Remediation"", ""type"": ""text"", ""name"": ""NetskopeAlertsRemediationingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Remediation data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Uba"", ""type"": ""text"", ""name"": ""NetskopeAlertsUbaingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Uba data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", 
""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Security Assessment"", ""type"": ""text"", ""name"": ""NetskopeAlertsSecurityAssessmentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Security Assessment data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Quarantine"", ""type"": ""text"", ""name"": ""NetskopeAlertsQuarantineingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Quarantine data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Policy"", ""type"": ""text"", ""name"": ""NetskopeAlertsPolicyingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Policy data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malware"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalwareingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malware data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Malsite"", ""type"": ""text"", ""name"": ""NetskopeAlertsMalsiteingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Malsite data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts DLP"", ""type"": ""text"", ""name"": ""NetskopeAlertsDlpingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts DLP data?"", ""options"": [{""key"": 
""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts CTEP"", ""type"": ""text"", ""name"": ""NetskopeAlertsCtepingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts CTEP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Watchlist"", ""type"": ""text"", ""name"": ""NetskopeAlertsWatchlistingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Watchlist data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Compromised Credentials"", ""type"": ""text"", ""name"": ""NetskopeAlertsCompromisedCredentialsingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Compromised Credentials data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Content"", ""type"": ""text"", ""name"": ""NetskopeAlertsContentingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Content data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Alerts Device"", ""type"": ""text"", ""name"": ""NetskopeAlertsDeviceingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Alerts Device data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Application"", ""type"": ""text"", ""name"": ""NetskopeEventsApplicationingestion"", ""required"": true, ""placeholder"": ""Do you want to 
ingest Netskope Events Application data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Audit"", ""type"": ""text"", ""name"": ""NetskopeEventsAuditioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Audit data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Connection"", ""type"": ""text"", ""name"": ""NetskopeEventsConnectioningestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Connection data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events DLP"", ""type"": ""text"", ""name"": ""NetskopeEventsDLPingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events DLP data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Endpoint"", ""type"": ""text"", ""name"": ""NetskopeEventsEndpointingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Endpoint data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Infrastructure"", ""type"": ""text"", ""name"": ""NetskopeEventsInfrastructureingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Infrastructure data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Network"", ""type"": ""text"", ""name"": ""NetskopeEventsNetworkingestion"", ""required"": true, 
""placeholder"": ""Do you want to ingest Netskope Events Network data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Netskope Events Page"", ""type"": ""text"", ""name"": ""NetskopeEventsPageingestion"", ""required"": true, ""placeholder"": ""Do you want to ingest Netskope Events Page data?"", ""options"": [{""key"": ""Yes"", ""text"": ""Yes""}, {""key"": ""No"", ""text"": ""No""}]}}, {""type"": ""InstructionStepsGroup"", ""parameters"": {""instructionSteps"": [{""title"": ""OPTIONAL: Specify the Index the API uses."", ""description"": ""**Configuring the index is optional and only required in advanced scenario's.** \n Netskope uses an [index](https://docs.netskope.com/en/using-the-rest-api-v2-dataexport-iterator-endpoints/#how-do-iterator-endpoints-function) to retrieve events. In some advanced cases (consuming the event in multiple Microsoft Sentinel workspaces, or pre-fatiguing the index to only retrieve recent data), a customer might want to have direct control over the index."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Index"", ""placeholder"": ""NetskopeCCP"", ""type"": ""text"", ""name"": ""Index""}}]}]}}]}, {""title"": ""STEP 3 - Click Connect"", ""description"": ""Verify all fields above were filled in correctly. 
Press the Connect to connect Netskope to Microsoft Sentinel."", ""instructions"": [{""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Netskope organisation url"", ""description"": ""The Netskope data connector requires you to provide your organisation url. You can find your organisation url by signing into the Netskope portal.""}, {""name"": ""Netskope API key"", ""description"": ""The Netskope data connector requires you to provide a valid API key. You can create one by following the [Netskope documentation](https://docs.netskope.com/en/rest-api-v2-overview-312207/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeAlertsEvents_RestAPI_CCP/NetskopeAlertsEvents_ConnectorDefination.json","true" +"Netskope_WebTx_metrics_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" +"alertscompromisedcredentialdata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft Log Analytics documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" +"alertsctepdata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft Log Analytics documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" +"alertsdlpdata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft Log Analytics documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" +"alertsmalsitedata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft Log Analytics documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" +"alertsmalwaredata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details about the REST APIs, refer to the documentation below:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft Log Analytics documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" +"alertspolicydata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details about the REST APIs, refer to the documentation below:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft Log Analytics documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" +"alertsquarantinedata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details about the REST APIs, refer to the documentation below:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft Log Analytics documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" +"alertsremediationdata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details about the REST APIs, refer to the documentation below:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft Log Analytics documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" +"alertssecurityassessmentdata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" +"alertsubadata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" +"eventsapplicationdata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" +"eventsauditdata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft log analytic documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" +"eventsconnectiondata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure Storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft Log Analytics documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" +"eventsincidentdata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure Storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft Log Analytics documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" +"eventsnetworkdata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure Storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft Log Analytics documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in azure active directory() and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" +"eventspagedata_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeDataConnector","Netskope","Netskope Data Connector","The [Netskope](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/) data connector provides the following capabilities:
1. NetskopeToAzureStorage :
>* Get the Netskope Alerts and Events data from Netskope and ingest to Azure storage.
2. StorageToSentinel :
>* Get the Netskope Alerts and Events data from Azure storage and ingest to custom log table in log analytics workspace.
3. WebTxMetrics :
>* Get the WebTxMetrics data from Netskope and ingest to custom log table in log analytics workspace.


For more details of REST APIs refer to the below documentations:
1. Netskope API documentation:
> https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/
2. Azure Storage documentation:
> https://learn.microsoft.com/azure/storage/common/storage-introduction
3. Microsoft Log Analytics documentation:
> https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-overview","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Netskope APIs to pull its Alerts and Events data into custom log table. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of the TriggersSync playbook. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 2 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TriggersSync playbook. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TriggersSync playbook. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 3 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 4 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 5 - Steps to create the azure functions for Netskope Alerts and Events Data Collection**\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the Netskope API Authorization Key(s)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""description"": ""Using the ARM template deploy the function apps for ingestion of Netskope events and alerts data to Sentinel.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSelect Yes in Alerts and Events types dropdown for that endpoint you want to fetch Alerts and Events \n\t\tLog Level \n\t\tWorkspace ID \n\t\tWorkspace Key \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. 
See the documentation to learn more about API on the [Rest API reference](https://docs.netskope.com/en/netskope-help/admin-console/rest-api/rest-api-v2-overview-312207/)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeDataConnector/Netskope_FunctionApp.json","true" +"NetskopeWebtxData_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeWebTransactionsDataConnector","Netskope","Netskope Web Transactions Data Connector","The [Netskope Web Transactions](https://docs.netskope.com/en/netskope-help/data-security/transaction-events/netskope-transaction-events/) data connector provides the functionality of a docker image to pull the Netskope Web Transactions data from google pubsublite, process the data and ingest the processed data to Log Analytics. As part of this data connector two tables will be formed in Log Analytics, one for Web Transactions data and other for errors encountered during execution.


For more details related to Web Transactions, refer to the documentation below:
1. Netskope Web Transactions documentation:
> https://docs.netskope.com/en/netskope-help/data-security/transaction-events/netskope-transaction-events/
","[{""title"": """", ""description"": "">**NOTE:** This connector provides the functionality of ingesting Netskope Web Transactions data using a docker image to be deployed on a virtual machine (Either Azure VM/On Premise VM). Check the [Azure VM pricing page](https://azure.microsoft.com/pricing/details/virtual-machines/linux) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. 
Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 2 - Choose one from the following two deployment options to deploy the docker based data connector to ingest Netskope Web Transactions data **\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Netskope API Authorization Key(s) [Make sure the token has permissions for transaction events]."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Using Azure Resource Manager (ARM) Template to deploy VM [Recommended]"", ""description"": ""Using the ARM template deploy an Azure VM, install the prerequisites and start execution.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2WebTransactions-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tDocker Image Name (mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions)\n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSeek Timestamp (The epoch timestamp that you want to seek the pubsublite pointer, can be left empty) \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tBackoff Retry Count (The retry count for token related errors before restarting the execution.) 
\n\t\tBackoff Sleep Time (Number of seconds to sleep before retrying) \n\t\tIdle Timeout (Number of seconds to wait for Web Transactions Data before restarting execution) \n\t\tVM Name \n\t\tAuthentication Type \n\t\tAdmin Password or Key \n\t\tDNS Label Prefix \n\t\tUbuntu OS Version \n\t\tLocation \n\t\tVM Size \n\t\tSubnet Name \n\t\tNetwork Security Group Name \n\t\tSecurity Type \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}, {""title"": ""Option 2 - Manual Deployment on previously created virtual machine"", ""description"": ""Use the following step-by-step instructions to deploy the docker based data connector manually on a previously created virtual machine.""}, {""title"": """", ""description"": ""**1. Install docker and pull docker Image**\n\n>**NOTE:** Make sure that the VM is linux based (preferably Ubuntu).\n\n1. Firstly you will need to [SSH into the virtual machine](https://learn.microsoft.com/azure/virtual-machines/linux-vm-connect?tabs=Linux).\n2. Now install [docker engine](https://docs.docker.com/engine/install/).\n3. Now pull the docker image from docker hub using the command: 'sudo docker pull mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions'.\n4. Now to run the docker image use the command: 'sudo docker run -it -v $(pwd)/docker_persistent_volume:/app mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions'. You can replace mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions with the image id. Here docker_persistent_volume is the name of the folder that would be created on the vm in which the files will get stored.""}, {""title"": """", ""description"": ""**2. Configure the Parameters**\n\n1. Once the docker image is running it will ask for the required parameters.\n2. 
Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSeek Timestamp (The epoch timestamp that you want to seek the pubsublite pointer, can be left empty) \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tBackoff Retry Count (The retry count for token related errors before restarting the execution.) \n\t\tBackoff Sleep Time (Number of seconds to sleep before retrying) \n\t\tIdle Timeout (Number of seconds to wait for Web Transactions Data before restarting execution)\n3. Now the execution has started but is in interactive mode, so that shell cannot be stopped. To run it as a background process, stop the current execution by pressing Ctrl+C and then use the command: 'sudo docker run -d -v $(pwd)/docker_persistent_volume:/app mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions'.""}, {""title"": """", ""description"": ""**3. Stop the docker container**\n\n1. Use the command 'sudo docker container ps' to list the running docker containers. Note down your container id.\n2. Now stop the container using the command: 'sudo docker stop *<*container-id*>*'.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Compute permissions"", ""description"": ""Read and write permissions to Azure VMs is required. [See the documentation to learn more about Azure VMs](https://learn.microsoft.com/azure/virtual-machines/overview).""}, {""name"": ""TransactionEvents Credentials and Permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. [See the documentation to learn more about Transaction Events.](https://docs.netskope.com/en/netskope-help/data-security/transaction-events/netskope-transaction-events/)""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeWebTransactionsDataConnector/Netskope_WebTransactions.json","true" +"NetskopeWebtxErrors_CL","Netskopev2","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2","netskope","netskope_mss","2024-03-18","2024-03-18","","Netskope","Partner","https://www.netskope.com/services#support","","domains","NetskopeWebTransactionsDataConnector","Netskope","Netskope Web Transactions Data Connector","The [Netskope Web Transactions](https://docs.netskope.com/en/netskope-help/data-security/transaction-events/netskope-transaction-events/) data connector provides the functionality of a docker image to pull the Netskope Web Transactions data from google pubsublite, process the data and ingest the processed data to Log Analytics. As part of this data connector two tables will be formed in Log Analytics, one for Web Transactions data and other for errors encountered during execution.


For more details related to Web Transactions, refer to the documentation below:
1. Netskope Web Transactions documentation:
> https://docs.netskope.com/en/netskope-help/data-security/transaction-events/netskope-transaction-events/
","[{""title"": """", ""description"": "">**NOTE:** This connector provides the functionality of ingesting Netskope Web Transactions data using a docker image to be deployed on a virtual machine (Either Azure VM/On Premise VM). Check the [Azure VM pricing page](https://azure.microsoft.com/pricing/details/virtual-machines/linux) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to create/get Credentials for the Netskope account** \n\n Follow the steps in this section to create/get **Netskope Hostname** and **Netskope API Token**:\n 1. Login to your **Netskope Tenant** and go to the **Settings menu** on the left navigation bar.\n 2. Click on Tools and then **REST API v2**\n 3. Now, click on the new token button. Then it will ask for token name, expiration duration and the endpoints that you want to fetch data from.\n 5. Once that is done click the save button, the token will be generated. 
Copy the token and save at a secure place for further usage.""}, {""title"": """", ""description"": ""**STEP 2 - Choose one from the following two deployment options to deploy the docker based data connector to ingest Netskope Web Transactions data **\n\n>**IMPORTANT:** Before deploying Netskope data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the Netskope API Authorization Key(s) [Make sure the token has permissions for transaction events]."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Using Azure Resource Manager (ARM) Template to deploy VM [Recommended]"", ""description"": ""Using the ARM template deploy an Azure VM, install the prerequisites and start execution.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-NetskopeV2WebTransactions-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tDocker Image Name (mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions)\n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSeek Timestamp (The epoch timestamp that you want to seek the pubsublite pointer, can be left empty) \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tBackoff Retry Count (The retry count for token related errors before restarting the execution.) 
\n\t\tBackoff Sleep Time (Number of seconds to sleep before retrying) \n\t\tIdle Timeout (Number of seconds to wait for Web Transactions Data before restarting execution) \n\t\tVM Name \n\t\tAuthentication Type \n\t\tAdmin Password or Key \n\t\tDNS Label Prefix \n\t\tUbuntu OS Version \n\t\tLocation \n\t\tVM Size \n\t\tSubnet Name \n\t\tNetwork Security Group Name \n\t\tSecurity Type \n4. Click on **Review+Create**. \n5. Then after validation click on **Create** to deploy.""}, {""title"": ""Option 2 - Manual Deployment on previously created virtual machine"", ""description"": ""Use the following step-by-step instructions to deploy the docker based data connector manually on a previously created virtual machine.""}, {""title"": """", ""description"": ""**1. Install docker and pull docker Image**\n\n>**NOTE:** Make sure that the VM is linux based (preferably Ubuntu).\n\n1. Firstly you will need to [SSH into the virtual machine](https://learn.microsoft.com/azure/virtual-machines/linux-vm-connect?tabs=Linux).\n2. Now install [docker engine](https://docs.docker.com/engine/install/).\n3. Now pull the docker image from docker hub using the command: 'sudo docker pull mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions'.\n4. Now to run the docker image use the command: 'sudo docker run -it -v $(pwd)/docker_persistent_volume:/app mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions'. You can replace mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions with the image id. Here docker_persistent_volume is the name of the folder that would be created on the vm in which the files will get stored.""}, {""title"": """", ""description"": ""**2. Configure the Parameters**\n\n1. Once the docker image is running it will ask for the required parameters.\n2. 
Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tNetskope HostName \n\t\tNetskope API Token \n\t\tSeek Timestamp (The epoch timestamp that you want to seek the pubsublite pointer, can be left empty) \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tBackoff Retry Count (The retry count for token related errors before restarting the execution.) \n\t\tBackoff Sleep Time (Number of seconds to sleep before retrying) \n\t\tIdle Timeout (Number of seconds to wait for Web Transactions Data before restarting execution)\n3. Now the execution has started but is in interactive mode, so that shell cannot be stopped. To run it as a background process, stop the current execution by pressing Ctrl+C and then use the command: 'sudo docker run -d -v $(pwd)/docker_persistent_volume:/app mgulledge/netskope-microsoft-sentinel-plugin:netskopewebtransactions'.""}, {""title"": """", ""description"": ""**3. Stop the docker container**\n\n1. Use the command 'sudo docker container ps' to list the running docker containers. Note down your container id.\n2. Now stop the container using the command: 'sudo docker stop *<*container-id*>*'.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Microsoft Entra ID and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Compute permissions"", ""description"": ""Read and write permissions to Azure VMs is required. [See the documentation to learn more about Azure VMs](https://learn.microsoft.com/azure/virtual-machines/overview).""}, {""name"": ""TransactionEvents Credentials and Permissions"", ""description"": ""**Netskope Tenant** and **Netskope API Token** is required. [See the documentation to learn more about Transaction Events.](https://docs.netskope.com/en/netskope-help/data-security/transaction-events/netskope-transaction-events/)""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netskopev2/Data%20Connectors/NetskopeWebTransactionsDataConnector/Netskope_WebTransactions.json","true" +"","Network Session Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Network%20Session%20Essentials","azuresentinel","azure-sentinel-solution-networksession","2022-11-11","2022-11-11","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","Network Threat Protection Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Network%20Threat%20Protection%20Essentials","azuresentinel","azure-sentinel-solution-networkthreatdetection","2022-11-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"CommonSecurityLog","Netwrix Auditor","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netwrix%20Auditor","azuresentinel","azure-sentinel-solution-netwrixauditor","2022-06-17","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Netwrix","Netwrix","[Deprecated] Netwrix Auditor via Legacy Agent","Netwrix Auditor data connector provides the capability to ingest [Netwrix Auditor (formerly Stealthbits Privileged Activity Manager)](https://www.netwrix.com/auditor.html) events into Microsoft Sentinel. Refer to [Netwrix documentation](https://helpcenter.netwrix.com/) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on NetwrixAuditor parser based on a Kusto Function to work as expected. This parser is installed along with solution installation."", ""instructions"": []}, {""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Configure Netwrix Auditor to send logs using CEF"", ""description"": ""[Follow the instructions](https://www.netwrix.com/download/QuickStart/Netwrix_Auditor_Add-on_for_HPE_ArcSight_Quick_Start_Guide.pdf) to configure event export from Netwrix Auditor.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netwrix%20Auditor/Data%20Connectors/Connector_NetwrixAuditor.json","true" +"CommonSecurityLog","Netwrix Auditor","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netwrix%20Auditor","azuresentinel","azure-sentinel-solution-netwrixauditor","2022-06-17","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","NetwrixAma","Netwrix","[Deprecated] Netwrix Auditor via AMA","Netwrix Auditor data connector provides the capability to ingest [Netwrix Auditor (formerly Stealthbits Privileged Activity Manager)](https://www.netwrix.com/auditor.html) events into Microsoft Sentinel. Refer to [Netwrix documentation](https://helpcenter.netwrix.com/) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on NetwrixAuditor parser based on a Kusto Function to work as expected. This parser is installed along with solution installation."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Configure Netwrix Auditor to send logs using CEF"", ""description"": ""[Follow the instructions](https://www.netwrix.com/download/QuickStart/Netwrix_Auditor_Add-on_for_HPE_ArcSight_Quick_Start_Guide.pdf) to configure event export from Netwrix Auditor."", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Netwrix%20Auditor/Data%20Connectors/template_NetwrixAuditorAMA.json","true" +"","Neustar IP GeoPoint","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Neustar%20IP%20GeoPoint","azuresentinel","azure-sentinel-solution-neustaripgeopoint","2022-09-30","2022-09-30","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"NonameAPISecurityAlert_CL","NonameSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NonameSecurity","nonamegate","nonamesecurity_sentinelsolution","2022-12-01","","","Noname Security","Partner","https://nonamesecurity.com/","","domains","NonameSecurityMicrosoftSentinel","Noname Security","Noname Security for Microsoft Sentinel","Noname Security solution to POST data into a Microsoft Sentinel SIEM workspace via the Azure Monitor REST API","[{""title"": ""Configure the Noname Sentinel integration."", ""description"": ""Configure the Sentinel workflow in the Noname integrations settings. 
Find documentation at https://docs.nonamesecurity.com"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NonameSecurity/Data%20Connectors/Connector_RESTAPI_NonameSecurity.json","true" +"NordPassEventLogs_CL","NordPass","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NordPass","nordsecurityinc","azure-sentinel-solution-nordpass","2025-04-22","","","NordPass","Partner","https://support.nordpass.com/","","domains","NordPass","NordPass","NordPass","Integrating NordPass with Microsoft Sentinel SIEM via the API will allow you to automatically transfer Activity Log data from NordPass to Microsoft Sentinel and get real-time insights, such as item activity, all login attempts, and security notifications.","[{""description"": ""To proceed with the Microsoft Sentinel setup\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-Nordpass-azuredeploy)\n2. 
**Please note that after the successful deployment, the system pulls Activity Log data every 1 minute by default.**""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""description"": ""Ensure that the [resource group](https://learn.microsoft.com/en-us/azure/azure-resource-manager/management/manage-resource-groups-portal#create-resource-groups) and the [Log Analytics workspace](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/quick-create-workspace?tabs=azure-portal) are created and located in the same region so you can deploy the Azure Functions.""}, {""description"": ""[Add Microsoft Sentinel](https://learn.microsoft.com/en-us/azure/sentinel/quickstart-onboard#enable-microsoft-sentinel-) to the created Log Analytics workspace.""}, {""description"": ""Generate a [Microsoft Sentinel API URL and token](https://www.google.com/url?q=https://support.nordpass.com/hc/en-us/articles/31972037289873&sa=D&source=docs&ust=1743770997230005&usg=AOvVaw16p0hstJ6OeBBoFdBKZRfr) in the NordPass Admin Panel to finish the Azure Functions integration. Please note that you\u2019ll need the NordPass Enterprise account for that.""}, {""description"": ""**Important:** This connector uses Azure Functions to retrieve Activity Logs from NordPass into Microsoft Sentinel. This may result in additional data ingestion costs. 
For more information, refer to the Azure Functions pricing page.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NordPass/Data%20Connectors/NordPass_API_FunctionApp.json;https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NordPass/Data%20Connectors/deployment/NordPass_data_connector.json","false" +"CommonSecurityLog","NozomiNetworks","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NozomiNetworks","azuresentinel","azure-sentinel-solution-nozominetworks","2022-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","NozomiNetworksN2OS","Nozomi Networks","[Deprecated] Nozomi Networks N2OS via Legacy Agent","The [Nozomi Networks](https://www.nozominetworks.com/) data connector provides the capability to ingest Nozomi Networks Events into Microsoft Sentinel. Refer to the Nozomi Networks [PDF documentation](https://www.nozominetworks.com/resources/data-sheets-brochures-learning-guides/) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**NozomiNetworksEvents**](https://aka.ms/sentinel-NozomiNetworks-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Follow these steps to configure Nozomi Networks device for sending Alerts, Audit Logs, Health Logs log via syslog in CEF format:\n\n> 1. Log in to the Guardian console.\n\n> 2. Navigate to Administration->Data Integration, press +Add and select the Common Event Format (CEF) from the drop down\n\n> 3. Create New Endpoint using the appropriate host information and enable Alerts, Audit Logs, Health Logs for sending.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NozomiNetworks/Data%20Connectors/NozomiNetworksN2OS.json","true" +"CommonSecurityLog","NozomiNetworks","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NozomiNetworks","azuresentinel","azure-sentinel-solution-nozominetworks","2022-07-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","NozomiNetworksN2OSAma","Nozomi Networks","[Deprecated] Nozomi Networks N2OS via AMA","The [Nozomi Networks](https://www.nozominetworks.com/) data connector provides the capability to ingest Nozomi Networks Events into Microsoft Sentinel. Refer to the Nozomi Networks [PDF documentation](https://www.nozominetworks.com/resources/data-sheets-brochures-learning-guides/) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**NozomiNetworksEvents**](https://aka.ms/sentinel-NozomiNetworks-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Follow these steps to configure Nozomi Networks device for sending Alerts, Audit Logs, Health Logs log via syslog in CEF format:\n\n> 1. Log in to the Guardian console.\n\n> 2. Navigate to Administration->Data Integration, press +Add and select the Common Event Format (CEF) from the drop down\n\n> 3. Create New Endpoint using the appropriate host information and enable Alerts, Audit Logs, Health Logs for sending."", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/NozomiNetworks/Data%20Connectors/template_NozomiNetworksN2OSAMA.json","true" +"CommonSecurityLog","OSSEC","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OSSEC","azuresentinel","azure-sentinel-solution-ossec","2022-05-19","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","OSSEC","OSSEC","[Deprecated] OSSEC via Legacy Agent","OSSEC data connector provides the capability to ingest [OSSEC](https://www.ossec.net/) events into Microsoft Sentinel. Refer to [OSSEC documentation](https://www.ossec.net/docs) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias OSSEC and load the function code or click [here](https://aka.ms/sentinel-OSSECEvent-parser), on the second line of the query, enter the hostname(s) of your OSSEC device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""[Follow these steps](https://www.ossec.net/docs/docs/manual/output/syslog-output.html) to configure OSSEC sending alerts via syslog.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OSSEC/Data%20Connectors/Connector_CEF_OSSEC.json","true" +"CommonSecurityLog","OSSEC","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OSSEC","azuresentinel","azure-sentinel-solution-ossec","2022-05-19","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","OSSECAma","OSSEC","[Deprecated] OSSEC via AMA","OSSEC data connector provides the capability to ingest [OSSEC](https://www.ossec.net/) events into Microsoft Sentinel. Refer to [OSSEC documentation](https://www.ossec.net/docs) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias OSSEC and load the function code or click [here](https://aka.ms/sentinel-OSSECEvent-parser), on the second line of the query, enter the hostname(s) of your OSSEC device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""[Follow these steps](https://www.ossec.net/docs/docs/manual/output/syslog-output.html) to configure OSSEC sending alerts via syslog."", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OSSEC/Data%20Connectors/template_OSSECAMA.json","true" +"ObsidianActivity_CL","Obsidian Datasharing","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Obsidian%20Datasharing","391c3d87-edc8-4f72-a719-825c022b8eb4","azure-sentinel-solution-obsidian-activity-threat","2024-01-01","","","Obsidian Security","Partner","https://obsidiansecurity.com/contact","","domains","ObsidianDatasharing","Obsidian Security","Obsidian Datasharing Connector","The Obsidian Datasharing connector provides the capability to read raw event data from Obsidian Datasharing in Microsoft Sentinel.","[{""title"": ""1. Create ARM Resources and Provide the Required Permissions"", ""description"": ""This connector reads data from the tables that Obsidian Datasharing uses in a Microsoft Analytics Workspace, if the data forwarding option is enabled in Obsidian Datasharing then raw event data is sent to the Microsoft Sentinel Ingestion API."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated Configuration and Secure Data Ingestion with Entra Application \nClicking on \""Deploy\"" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR). \nIt will then create an Entra application, link the DCR to it, and set the entered secret in the application. 
This setup enables data to be sent securely to the DCR using an Entra token.""}}, {""parameters"": {""label"": ""Deploy Obsidian Datasharing connector resources"", ""applicationDisplayName"": ""Obsidian Datasharing Connector Application""}, ""type"": ""DeployPushConnectorButton""}]}, {""title"": ""2. Push your logs into the workspace"", ""description"": ""Use the following parameters to configure the your machine to send the logs to the workspace."", ""instructions"": [{""parameters"": {""label"": ""Tenant ID (Directory ID)"", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the App Registration Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the App Registration Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Endpoint Uri"", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the Data Collection Endpoint Uri""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Rule Immutable ID"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the Data Collection Rule Immutable ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Activity Stream Name"", ""value"": ""Custom-ObsidianActivity_CL""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Threat Stream Name"", ""value"": ""Custom-ObsidianThreat_CL""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": 
{""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rule (DCR). Typically requires Azure RBAC Owner or User Access Administrator role""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Obsidian%20Datasharing/Data%20Connectors/ObsidianDatasharing_CCP/ObsidianDatasharing_ConnectorDefinition.json","true" +"ObsidianThreat_CL","Obsidian Datasharing","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Obsidian%20Datasharing","391c3d87-edc8-4f72-a719-825c022b8eb4","azure-sentinel-solution-obsidian-activity-threat","2024-01-01","","","Obsidian Security","Partner","https://obsidiansecurity.com/contact","","domains","ObsidianDatasharing","Obsidian Security","Obsidian Datasharing Connector","The Obsidian Datasharing connector provides the capability to read raw event data from Obsidian Datasharing in Microsoft Sentinel.","[{""title"": ""1. Create ARM Resources and Provide the Required Permissions"", ""description"": ""This connector reads data from the tables that Obsidian Datasharing uses in a Microsoft Analytics Workspace, if the data forwarding option is enabled in Obsidian Datasharing then raw event data is sent to the Microsoft Sentinel Ingestion API."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated Configuration and Secure Data Ingestion with Entra Application \nClicking on \""Deploy\"" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR). \nIt will then create an Entra application, link the DCR to it, and set the entered secret in the application. 
This setup enables data to be sent securely to the DCR using an Entra token.""}}, {""parameters"": {""label"": ""Deploy Obsidian Datasharing connector resources"", ""applicationDisplayName"": ""Obsidian Datasharing Connector Application""}, ""type"": ""DeployPushConnectorButton""}]}, {""title"": ""2. Push your logs into the workspace"", ""description"": ""Use the following parameters to configure the your machine to send the logs to the workspace."", ""instructions"": [{""parameters"": {""label"": ""Tenant ID (Directory ID)"", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the App Registration Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the App Registration Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Endpoint Uri"", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the Data Collection Endpoint Uri""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Rule Immutable ID"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the Data Collection Rule Immutable ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Activity Stream Name"", ""value"": ""Custom-ObsidianActivity_CL""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Threat Stream Name"", ""value"": ""Custom-ObsidianThreat_CL""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": 
{""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rule (DCR). Typically requires Azure RBAC Owner or User Access Administrator role""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Obsidian%20Datasharing/Data%20Connectors/ObsidianDatasharing_CCP/ObsidianDatasharing_ConnectorDefinition.json","true" +"Okta_CL","Okta Single Sign-On","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On","azuresentinel","azure-sentinel-solution-okta","2022-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OktaSSO","Okta","Okta Single Sign-On","The [Okta Single Sign-On (SSO)](https://www.okta.com/products/single-sign-on/) connector provides the capability to ingest audit and event logs from the Okta API into Microsoft Sentinel. The connector provides visibility into these log types in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Okta SSO to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated, if you have previously deployed an earlier version, and want to update, please delete the existing Okta Azure Function before redeploying this version.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Okta SSO API**\n\n [Follow these instructions](https://developer.okta.com/docs/guides/create-an-api-token/create-the-token/) to create an API Token.""}, {""title"": """", ""description"": ""**Note** - For more information on the rate limit restrictions enforced by Okta, please refer to the **[documentation](https://developer.okta.com/docs/reference/rl-global-mgmt/)**.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Okta SSO connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Okta SSO API Authorization Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""This method provides an automated deployment of 
the Okta SSO connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentineloktaazuredeployv2-solution) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentineloktaazuredeployv2-solution-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Token** and **URI**. \n - Use the following schema for the `uri` value: `https:///api/v1/logs?since=` Replace `` with your domain. [Click here](https://developer.okta.com/docs/reference/api-overview/#url-namespace) for further details on how to identify your Okta domain namespace. There is no need to add a time value to the URI, the Function App will dynamically append the initial start time of logs to UTC 0:00 for the current UTC date as time value to the URI in the proper format. \n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Okta SSO connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentineloktaazurefunctioncodev2) file. Extract archive to your local development computer.\n2. 
Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following five (5) application settings individually, with their respective string values (case-sensitive): \n\t\tapiToken\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\tlogAnalyticsUri (optional)\n - Use the following schema for the `uri` value: `https:///api/v1/logs?since=` Replace `` with your domain. [Click here](https://developer.okta.com/docs/reference/api-overview/#url-namespace) for further details on how to identify your Okta domain namespace. There is no need to add a time value to the URI, the Function App will dynamically append the initial start time of logs to UTC 0:00 for the current UTC date as time value to the URI in the proper format.\n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: https://.ods.opinsights.azure.us. \n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Okta API Token"", ""description"": ""An Okta API Token is required. 
See the documentation to learn more about the [Okta System Log API](https://developer.okta.com/docs/reference/api/system-log/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On/Data%20Connectors/OktaSingleSign-On/Connector_REST_API_FunctionApp_Okta.json","true" +"OktaNativePoller_CL","Okta Single Sign-On","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On","azuresentinel","azure-sentinel-solution-okta","2022-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OktaSSO_Polling","Okta","Okta Single Sign-On (Polling CCP)","The [Okta Single Sign-On (SSO)](https://www.okta.com/products/single-sign-on/) connector provides the capability to ingest audit and event logs from the Okta API into Microsoft Sentinel. The connector provides visibility into these log types in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","[{""title"": ""Connect OktaSSO"", ""description"": ""Please insert your APIKey"", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Domain Name"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{placeHolder1}}"", ""placeHolderValue"": """"}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On/Data%20Connectors/OktaNativePollerConnector/azuredeploy_Okta_native_poller_connector.json","true" +"OktaV2_CL","Okta Single 
Sign-On","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On","azuresentinel","azure-sentinel-solution-okta","2022-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OktaSSOv2","Microsoft","Okta Single Sign-On","The [Okta Single Sign-On (SSO)](https://www.okta.com/products/single-sign-on/) data connector provides the capability to ingest audit and event logs from the Okta System Log API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform and uses the Okta System Log API to fetch the events. The connector supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security event data into custom columns so that queries don't need to parse it again, thus resulting in better performance.","[{""description"": ""To enable the Okta Single Sign-On for Microsoft Sentinel, provide the required information below and click on Connect.\n>"", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Endpoint"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add domain"", ""title"": ""Add domain"", ""subtitle"": ""Add domain"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Okta Domain Name"", ""placeholder"": ""Okta Domain Name (e.g., myDomain.okta.com)"", ""type"": ""text"", ""name"": ""domainname""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""API Key"", ""type"": ""password"", ""name"": ""apikey""}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write 
permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Okta API Token"", ""description"": ""An Okta API token. Follow the [following instructions](https://developer.okta.com/docs/guides/create-an-api-token/main/) to create an API token. See the [documentation](https://developer.okta.com/docs/reference/api/system-log/) to learn more about Okta System Log API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On/Data%20Connectors/OktaNativePollerConnectorV2/OktaSSOv2_DataConnectorDefinition.json","true" +"Okta_CL","Okta Single Sign-On","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On","azuresentinel","azure-sentinel-solution-okta","2022-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OktaSSOv2","Microsoft","Okta Single Sign-On","The [Okta Single Sign-On (SSO)](https://www.okta.com/products/single-sign-on/) data connector provides the capability to ingest audit and event logs from the Okta System Log API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform and uses the Okta System Log API to fetch the events. 
The connector supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security event data into a custom columns so that queries don't need to parse it again, thus resulting in better performance.","[{""description"": ""To enable the Okta Single Sign-On for Microsoft Sentinel, provide the required information below and click on Connect.\n>"", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Endpoint"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add domain"", ""title"": ""Add domain"", ""subtitle"": ""Add domain"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Okta Domain Name"", ""placeholder"": ""Okta Domain Name (e.g., myDomain.okta.com)"", ""type"": ""text"", ""name"": ""domainname""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""API Key"", ""type"": ""password"", ""name"": ""apikey""}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Okta API Token"", ""description"": ""An Okta API token. 
Follow the [following instructions](https://developer.okta.com/docs/guides/create-an-api-token/main/) to create an API token. See the [documentation](https://developer.okta.com/docs/reference/api/system-log/) to learn more about Okta System Log API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On/Data%20Connectors/OktaNativePollerConnectorV2/OktaSSOv2_DataConnectorDefinition.json","true" +"signIns","Okta Single Sign-On","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On","azuresentinel","azure-sentinel-solution-okta","2022-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OktaSSOv2","Microsoft","Okta Single Sign-On (Preview)","The [Okta Single Sign-On (SSO)](https://www.okta.com/products/single-sign-on/) data connector provides the capability to ingest audit and event logs from the Okta System Log API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform and uses the Okta System Log API to fetch the events. 
The connector supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security event data into a custom columns so that queries don't need to parse it again, thus resulting in better performance.","[{""description"": ""To enable the Okta Single Sign-On for Microsoft Sentinel, provide the required information below and click on Connect.\n>"", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Endpoint"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add domain"", ""title"": ""Add domain"", ""subtitle"": ""Add domain"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Okta Domain Name"", ""placeholder"": ""Okta Domain Name (e.g., myDomain.okta.com)"", ""type"": ""text"", ""name"": ""domainname""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""API Key"", ""type"": ""password"", ""name"": ""apikey""}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Okta API Token"", ""description"": ""An Okta API token. 
Follow the [following instructions](https://developer.okta.com/docs/guides/create-an-api-token/main/) to create an API token. See the [documentation](https://developer.okta.com/docs/reference/api/system-log/) to learn more about Okta System Log API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Okta%20Single%20Sign-On/Data%20Connectors/OktaNativePollerConnectorV2/azuredeploy_Okta_native_poller_connector_v2.json","true" +"Onapsis_Defend_CL","Onapsis Defend","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Onapsis%20Defend","onapsis","azure-sentinel-solution-onapsis-defend","2025-07-17","2025-07-17","","Onapsis","Partner","https://onapsis.com/support/","","domains","Onapsis","Onapsis Platform","Onapsis Defend Integration","Onapsis Defend Integration is aimed at forwarding alerts and logs collected and detected by Onapsis Platform into Microsoft Sentinel SIEM","[{""title"": ""1. Create ARM Resources and Provide the Required Permissions"", ""description"": ""We will create data collection rule (DCR) and data collection endpoint (DCE) resources. We will also create a Microsoft Entra app registration and assign the required permissions to it."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated deployment of Azure resources\nClicking on \""Deploy push connector resources\"" will trigger the creation of DCR and DCE resources.\nIt will then create a Microsoft Entra app registration with client secret and grant permissions on the DCR. This setup enables data to be sent securely to the DCR using OAuth v2 client credentials.""}}, {""parameters"": {""label"": ""Deploy push connector resources"", ""applicationDisplayName"": ""Onapsis Defend Integration push to Microsoft Sentinel""}, ""type"": ""DeployPushConnectorButton_test""}]}, {""title"": ""2. 
Maintain the data collection endpoint details and authentication info in Onapsis Defend Integration"", ""description"": ""Share the data collection endpoint URL and authentication info with the Onapsis Defend Integration administrator to configure the Onapsis Defend Integration to send data to the data collection endpoint."", ""instructions"": [{""parameters"": {""label"": ""Use this value to configure as Tenant ID in the LogIngestionAPI credential."", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra Application Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the Application Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Use this value to configure the LogsIngestionURL parameter when deploying the IFlow."", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the DCE URI""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""DCR Immutable ID"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the DCR ID""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", 
""description"": ""Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rules. Typically requires Azure RBAC Owner or User Access Administrator role.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Onapsis%20Defend/Data%20Connectors/Onapsis.json","true" +"Onapsis_Defend_CL","Onapsis Defend","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Onapsis%20Defend","onapsis","azure-sentinel-solution-onapsis-defend","2025-07-17","2025-07-17","","Onapsis","Partner","https://onapsis.com/support/","","domains","Onapsis","Onapsis SE","Onapsis Defend: Integrate Unmatched SAP Threat Detection & Intel with Microsoft Sentinel","Empower security teams with deep visibility into unique exploit, zero-day, and threat actor activity; suspicious user or insider behavior; sensitive data downloads; security control violations; and more - all enriched by the SAP experts at Onapsis.","[{""title"": ""1. Create ARM Resources and Provide the Required Permissions"", ""description"": ""We will create data collection rule (DCR) and data collection endpoint (DCE) resources. We will also create a Microsoft Entra app registration and assign the required permissions to it."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated deployment of Azure resources\nClicking on \""Deploy push connector resources\"" will trigger the creation of DCR and DCE resources.\nIt will then create a Microsoft Entra app registration with client secret and grant permissions on the DCR. 
This setup enables data to be sent securely to the DCR using a OAuth v2 client credentials.""}}, {""parameters"": {""label"": ""Deploy push connector resources"", ""applicationDisplayName"": ""Onapsis Defend Integration push to Microsoft Sentinel""}, ""type"": ""DeployPushConnectorButton_test""}]}, {""title"": ""2. Maintain the data collection endpoint details and authentication info in Onapsis Defend Integration"", ""description"": ""Share the data collection endpoint URL and authentication info with the Onapsis Defend Integration administrator to configure the Onapsis Defend Integration to send data to the data collection endpoint."", ""instructions"": [{""parameters"": {""label"": ""Tenant ID | Use this value to configure as Tenant ID"", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra Application ID | Use this value for the Client ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra Application Secret | Use this value for the Token"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the Application Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""LogIngestionURL | Use this value for the URL parameter"", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the DCE URI""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""DCR Immutable ID | Use this value for the DCR_ID parameter"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the DCR ID""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, 
""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rules. Typically requires Azure RBAC Owner or User Access Administrator role.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Onapsis%20Defend/Data%20Connectors/Onapsis_PUSH_CCP/Onapsis_connectorDefinition.json","true" +"CommonSecurityLog","Onapsis Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Onapsis%20Platform","onapsis","onapsis_mss","2022-05-11","","","Onapsis","Partner","https://onapsis.com/company/contact-us","","domains","OnapsisPlatform","Onapsis","[Deprecated] Onapsis Platform","The Onapsis Connector allows you to export the alarms triggered in the Onapsis Platform into Microsoft Sentinel in real-time. This gives you the ability to monitor the activity on your SAP systems, identify incidents and respond to them quickly.","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your Onapsis Console and Microsoft Sentinel. 
This machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Refer to the Onapsis in-product help to set up log forwarding to the Syslog agent.\n\n> 1. Go to Setup > Third-party integrations > Defend Alarms and follow the instructions for Microsoft Sentinel.\n\n> 2. Make sure your Onapsis Console can reach the proxy machine where the agent is installed - logs should be sent to port 514 using TCP.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Create Onapsis lookup function for incident enrichment"", ""description"": ""[Follow these steps to get this Kusto function](https://aka.ms/sentinel-Onapsis-parser)""}, {""title"": ""5. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Onapsis%20Platform/Data%20Connectors/OnapsisPlatform.json","true" +"","OneIdentity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneIdentity","quest","oneidentity_mss","2022-05-02","","","One Identity","Partner","https://support.oneidentity.com/","","domains","","","","","","","","false" +"OneLoginEventsV2_CL","OneLoginIAM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM","azuresentinel","azure-sentinel-solution-oneloginiam","2022-08-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OneLogin","OneLogin","[DEPRECATED] OneLogin IAM Platform","The [OneLogin](https://www.onelogin.com/) data connector provides the capability to ingest common OneLogin IAM Platform events into Microsoft Sentinel through Webhooks. The OneLogin Event Webhook API which is also known as the Event Broadcaster will send batches of events in near real-time to an endpoint that you specify. When a change occurs in the OneLogin, an HTTPS POST request with event information is sent to a callback data connector URL. Refer to [Webhooks documentation](https://developers.onelogin.com/api-docs/1/events/webhooks) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This data connector uses Azure Functions based on HTTP Trigger for waiting POST requests with logs to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**OneLogin**](https://aka.ms/sentinel-OneLogin-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the OneLogin**\n\n Follow the [instructions](https://onelogin.service-now.com/kb_view_customer.do?sysparm_article=KB0010469) to configure Webhooks.\n\n1. Generate the **OneLoginBearerToken** according to your password policy.\n2. Set Custom Header in the format: Authorization: Bearer .\n3. 
Use JSON Array Logs Format.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the OneLogin data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the OneLogin data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-OneLogin-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **OneLoginBearerToken** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n6. 
After deploying open Function App page, select your app, go to the **Functions** and click **Get Function Url** copy it and follow p.7 from STEP 1.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the OneLogin data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-OneLogin-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select ** New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tOneLoginBearerToken\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Webhooks Credentials/permissions"", ""description"": ""**OneLoginBearerToken**, **Callback URL** are required for working Webhooks. See the documentation to learn more about [configuring Webhooks](https://onelogin.service-now.com/kb_view_customer.do?sysparm_article=KB0010469).You need to generate **OneLoginBearerToken** according to your security requirements and use it in **Custom Headers** section in format: Authorization: Bearer **OneLoginBearerToken**. 
Logs Format: JSON Array.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM/Data%20Connectors/OneLogin_Webhooks_FunctionApp.json","true" +"OneLoginUsersV2_CL","OneLoginIAM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM","azuresentinel","azure-sentinel-solution-oneloginiam","2022-08-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OneLogin","OneLogin","[DEPRECATED] OneLogin IAM Platform","The [OneLogin](https://www.onelogin.com/) data connector provides the capability to ingest common OneLogin IAM Platform events into Microsoft Sentinel through Webhooks. The OneLogin Event Webhook API which is also known as the Event Broadcaster will send batches of events in near real-time to an endpoint that you specify. When a change occurs in the OneLogin, an HTTPS POST request with event information is sent to a callback data connector URL. Refer to [Webhooks documentation](https://developers.onelogin.com/api-docs/1/events/webhooks) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This data connector uses Azure Functions based on HTTP Trigger for waiting POST requests with logs to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**OneLogin**](https://aka.ms/sentinel-OneLogin-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the OneLogin**\n\n Follow the [instructions](https://onelogin.service-now.com/kb_view_customer.do?sysparm_article=KB0010469) to configure Webhooks.\n\n1. Generate the **OneLoginBearerToken** according to your password policy.\n2. Set Custom Header in the format: Authorization: Bearer .\n3. 
Use JSON Array Logs Format.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the OneLogin data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the OneLogin data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-OneLogin-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **OneLoginBearerToken** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n6. 
After deploying open Function App page, select your app, go to the **Functions** and click **Get Function Url** copy it and follow p.7 from STEP 1.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the OneLogin data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-OneLogin-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select ** New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tOneLoginBearerToken\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Webhooks Credentials/permissions"", ""description"": ""**OneLoginBearerToken**, **Callback URL** are required for working Webhooks. See the documentation to learn more about [configuring Webhooks](https://onelogin.service-now.com/kb_view_customer.do?sysparm_article=KB0010469).You need to generate **OneLoginBearerToken** according to your security requirements and use it in **Custom Headers** section in format: Authorization: Bearer **OneLoginBearerToken**. 
Logs Format: JSON Array.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM/Data%20Connectors/OneLogin_Webhooks_FunctionApp.json","true" +"OneLogin_CL","OneLoginIAM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM","azuresentinel","azure-sentinel-solution-oneloginiam","2022-08-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OneLogin","OneLogin","[DEPRECATED] OneLogin IAM Platform","The [OneLogin](https://www.onelogin.com/) data connector provides the capability to ingest common OneLogin IAM Platform events into Microsoft Sentinel through Webhooks. The OneLogin Event Webhook API which is also known as the Event Broadcaster will send batches of events in near real-time to an endpoint that you specify. When a change occurs in the OneLogin, an HTTPS POST request with event information is sent to a callback data connector URL. Refer to [Webhooks documentation](https://developers.onelogin.com/api-docs/1/events/webhooks) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This data connector uses Azure Functions based on HTTP Trigger for waiting POST requests with logs to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**OneLogin**](https://aka.ms/sentinel-OneLogin-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the OneLogin**\n\n Follow the [instructions](https://onelogin.service-now.com/kb_view_customer.do?sysparm_article=KB0010469) to configure Webhooks.\n\n1. Generate the **OneLoginBearerToken** according to your password policy.\n2. Set Custom Header in the format: Authorization: Bearer .\n3. 
Use JSON Array Logs Format.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the OneLogin data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the OneLogin data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-OneLogin-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **OneLoginBearerToken** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n6. 
After deploying open Function App page, select your app, go to the **Functions** and click **Get Function Url** copy it and follow p.7 from STEP 1.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the OneLogin data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-OneLogin-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select ** New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tOneLoginBearerToken\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Webhooks Credentials/permissions"", ""description"": ""**OneLoginBearerToken**, **Callback URL** are required for working Webhooks. See the documentation to learn more about [configuring Webhooks](https://onelogin.service-now.com/kb_view_customer.do?sysparm_article=KB0010469).You need to generate **OneLoginBearerToken** according to your security requirements and use it in **Custom Headers** section in format: Authorization: Bearer **OneLoginBearerToken**. 
Logs Format: JSON Array.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM/Data%20Connectors/OneLogin_Webhooks_FunctionApp.json","true" +"OneLoginEventsV2_CL","OneLoginIAM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM","azuresentinel","azure-sentinel-solution-oneloginiam","2022-08-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OneLoginIAMLogsCCPDefinition","Microsoft","OneLogin IAM Platform (via Codeless Connector Framework)","The [OneLogin](https://www.onelogin.com/) data connector provides the capability to ingest common OneLogin IAM Platform events into Microsoft Sentinel through REST API by using OneLogin [Events API](https://developers.onelogin.com/api-docs/1/events/get-events) and OneLogin [Users API](https://developers.onelogin.com/api-docs/1/users/get-users). The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""description"": ""To ingest data from OneLogin IAM to Microsoft Sentinel, you have to click on Add Domain button below then you get a pop up to fill the details, provide the required information and click on Connect. 
You can see the domain endpoints connected in the grid.\n>"", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Endpoint"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add domain"", ""title"": ""Add domain"", ""subtitle"": ""Add domain"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""OneLogin Domain"", ""placeholder"": ""Enter your Company's OneLogin Domain"", ""type"": ""text"", ""name"": ""domainName"", ""required"": true, ""description"": ""For example, if your OneLogin Domain is test.onelogin.com, you need to enter only test in the above field.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client ID"", ""placeholder"": ""Enter your Client ID"", ""type"": ""text"", ""name"": ""cId"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client Secret"", ""placeholder"": ""Enter your Client Secret"", ""type"": ""password"", ""name"": ""cSec"", ""required"": true}}]}]}}], ""title"": ""Connect OneLogin IAM Platform to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""OneLogin IAM API Credentials"", ""description"": ""To create API Credentials follow the document link provided here, [Click Here](https://developers.onelogin.com/api-docs/1/getting-started/working-with-api-credentials). \n Make sure to have an account type of either account owner or administrator to create the API credentials. 
\n Once you create the API Credentials you get your Client ID and Client Secret.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM/Data%20Connectors/OneLoginIAMLogs_ccp/OneLoginIAMLogs_ConnectorDefinition.json","true" +"OneLoginUsersV2_CL","OneLoginIAM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM","azuresentinel","azure-sentinel-solution-oneloginiam","2022-08-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OneLoginIAMLogsCCPDefinition","Microsoft","OneLogin IAM Platform (via Codeless Connector Framework)","The [OneLogin](https://www.onelogin.com/) data connector provides the capability to ingest common OneLogin IAM Platform events into Microsoft Sentinel through REST API by using OneLogin [Events API](https://developers.onelogin.com/api-docs/1/events/get-events) and OneLogin [Users API](https://developers.onelogin.com/api-docs/1/users/get-users). The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""description"": ""To ingest data from OneLogin IAM to Microsoft Sentinel, you have to click on Add Domain button below then you get a pop up to fill the details, provide the required information and click on Connect. 
You can see the domain endpoints connected in the grid.\n>"", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Endpoint"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add domain"", ""title"": ""Add domain"", ""subtitle"": ""Add domain"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""OneLogin Domain"", ""placeholder"": ""Enter your Company's OneLogin Domain"", ""type"": ""text"", ""name"": ""domainName"", ""required"": true, ""description"": ""For example, if your OneLogin Domain is test.onelogin.com, you need to enter only test in the above field.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client ID"", ""placeholder"": ""Enter your Client ID"", ""type"": ""text"", ""name"": ""cId"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client Secret"", ""placeholder"": ""Enter your Client Secret"", ""type"": ""password"", ""name"": ""cSec"", ""required"": true}}]}]}}], ""title"": ""Connect OneLogin IAM Platform to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""OneLogin IAM API Credentials"", ""description"": ""To create API Credentials follow the document link provided here, [Click Here](https://developers.onelogin.com/api-docs/1/getting-started/working-with-api-credentials). \n Make sure to have an account type of either account owner or administrator to create the API credentials. 
\n Once you create the API Credentials you get your Client ID and Client Secret.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneLoginIAM/Data%20Connectors/OneLoginIAMLogs_ccp/OneLoginIAMLogs_ConnectorDefinition.json","true" +"OneTrustMetadataV3_CL","OneTrust","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneTrust","onetrustllc1594047340198","azure-sentinel-solution-onetrust","2025-10-24","2025-10-24","","OneTrust, LLC","Partner","https://www.onetrust.com/support/","","domains","OnetrustPush","OneTrust","OneTrust","The OneTrust connector for Microsoft Sentinel provides the capability to have near real time visibility into where sensitive data has been located or remediated across across Google Cloud and other OneTrust supported data sources.","[{""title"": ""1. Create ARM Resources and Provide the Required Permissions"", ""description"": ""This connector reads data from the tables that OneTrust uses in a Microsoft Analytics Workspace. If OneTrust's data forwarding option is enabled then raw event data can be sent to the Microsoft Sentinel Ingestion API."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated Configuration and Secure Data Ingestion with Entra Application \nClicking on \""Deploy\"" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR). \nIt will then create an Entra application, link the DCR to it, and set the entered secret in the application. This setup enables data to be sent securely to the DCR using an Entra token.""}}, {""parameters"": {""label"": ""Deploy OneTrust connector resources"", ""applicationDisplayName"": ""OneTrust Connector Application""}, ""type"": ""DeployPushConnectorButton""}]}, {""title"": ""2. 
Push your logs into the workspace"", ""description"": ""Use the following parameters to configure the your machine to send the logs to the workspace."", ""instructions"": [{""parameters"": {""label"": ""Tenant ID (Directory ID)"", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the App Registration Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the App Registration Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Endpoint Uri"", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the Data Collection Endpoint Uri""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Rule Immutable ID"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the Data Collection Rule Immutable ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""OneTrust Metadata Stream Name"", ""value"": ""Custom-OneTrustMetadataV3""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rule (DCR). 
Typically requires Azure RBAC Owner or User Access Administrator role""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OneTrust/Data%20Connectors/OneTrustLogs_CCF/OneTrustLogs_connectorDefinition.json","true" +"","Open Systems","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Open%20Systems","opensystemsag1582030008223","azure-sentinel-solution-osag","2025-05-12","","","Open Systems","Partner","https://www.open-systems.com/support","","domains","","","","","","","","false" +"","OpenCTI","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OpenCTI","azuresentinel","azure-sentinel-solution-opencti","2022-09-22","2022-09-22","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"Syslog","OpenVPN","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OpenVPN","azuresentinel","azure-sentinel-solution-openvpn","2022-08-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OpenVPN","OpenVPN","[Deprecated] OpenVPN Server","The [OpenVPN](https://github.com/OpenVPN) data connector provides the capability to ingest OpenVPN Server logs into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**OpenVpnEvent**](https://aka.ms/sentinel-openvpn-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server where the OpenVPN are forwarded.\n\n> Logs from OpenVPN Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n2. 
Select **Apply below configuration to my machines** and select the facilities and severities.\n3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Check your OpenVPN logs."", ""description"": ""OpenVPN server logs are written into common syslog file (depending on the Linux distribution used: e.g. /var/log/messages)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OpenVPN/Data%20Connectors/OpenVPN_Syslog.json","true" +"OCI_LogsV2_CL","Oracle Cloud Infrastructure","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Oracle%20Cloud%20Infrastructure","azuresentinel","azure-sentinel-solution-ocilogs","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OCI-Connector-CCP-Definition","Microsoft","Oracle Cloud Infrastructure (via Codeless Connector Framework)","The Oracle Cloud Infrastructure (OCI) data connector provides the capability to ingest OCI Logs from [OCI Stream](https://docs.oracle.com/iaas/Content/Streaming/Concepts/streamingoverview.htm) into Microsoft Sentinel using the [OCI Streaming REST API](https://docs.oracle.com/iaas/api/#/streaming/streaming/20180418).","[{""title"": ""Connect to OCI Streaming API to start collecting Event logs in Microsoft Sentinel"", ""description"": ""1) Log in to the OCI console and access the navigation menu.\n2) In the navigation menu, go to \""Analytics & AI\"" -> \""Streaming\"".\n3) Click \""Create Stream\"".\n4) Select an existing \""Stream Pool\"" or create a new one.\n5) Enter the following details:\n - \""Stream Name\""\n - \""Retention\""\n - \""Number of Partitions\""\n - 
\""Total Write Rate\""\n - \""Total Read Rate\"" (based on your data volume)\n6) In the navigation menu, go to \""Logging\"" -> \""Service Connectors\"".\n7) Click \""Create Service Connector\"".\n8) Enter the following details:\n - \""Connector Name\""\n - \""Description\""\n - \""Resource Compartment\""\n9) Select the \""Source\"": \""Logging\"".\n10) Select the \""Target\"": \""Streaming\"".\n11) (Optional) Configure \""Log Group\"", \""Filters\"", or use a \""custom search query\"" to stream only the required logs.\n12) Configure the \""Target\"" by selecting the previously created stream.\n13) Click \""Create\"".\n14) Follow the documentation to create a [Private Key and API Key Configuration File](https://docs.oracle.com/en-us/iaas/Content/API/Concepts/apisigningkey.htm).\n\r"", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Tenant Id"", ""columnValue"": ""properties.auth.tenantId""}, {""columnName"": ""Stream"", ""columnValue"": ""properties.request.streamId""}, {""columnName"": ""Partition"", ""columnValue"": ""properties.request.partitions""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""label"": ""Add stream"", ""title"": ""Add Oracle Cloud Infrastructure Data Stream"", ""subtitle"": ""Connect to Oracle Cloud Infrastructure Data"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Stream OCID"", ""placeholder"": ""Provide the OCI Stream OCID (E.g. 
ocid1.stream.oc1..xxxxxxEXAMPLExxxxxx)"", ""type"": ""text"", ""name"": ""streamId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Public Message Endpoint of the stream (Service Endpoint Base URL)"", ""placeholder"": ""Provide the Service Endpoint Base URL: (https://cell-1.streaming.ap-hyderabad-1.oci.oraclecloud.com)"", ""type"": ""text"", ""name"": ""serviceEndpointBaseUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Cursor Type"", ""name"": ""cursorType"", ""options"": [{""key"": ""IndividualCursor"", ""text"": ""Individual Cursor""}], ""required"": true}}, {""type"": ""InfoMessage"", ""parameters"": {""text"": ""The partition ID uses zero-based indexing. For example, if a stream has 3 partitions, the valid partition IDs are 0, 1, or 2.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Partition Id"", ""placeholder"": ""Provide the Partition Id. (E.g. 0 or 1 or 2)"", ""type"": ""text"", ""name"": ""partitions"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Tenant ID"", ""placeholder"": ""OCI Tenant ID (E.g. ocid1.tenancy.oc1..xxxxxxEXAMPLExxxxxx)"", ""type"": ""text"", ""name"": ""tenantId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""User ID"", ""placeholder"": ""Provide the User Id. (E.g. ocid1.user.oc1..xxxxxxEXAMPLExxxxxx)"", ""type"": ""text"", ""name"": ""userId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Pem File Content"", ""placeholder"": ""Provide the Pem File content."", ""type"": ""password"", ""name"": ""pemFile"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Fingerprint"", ""placeholder"": ""Provide the fingerprint for the Pem File Content. (E.g. 
12:34:56:78:90:AB:CD:EF:GH:IJ:KL:MN:OP)"", ""type"": ""password"", ""name"": ""publicFingerprint"", ""validations"": {""required"": true}}}, {""type"": ""InfoMessage"", ""parameters"": {""text"": ""If your PEM file is not encrypted, leave Pass Phrase as blank.""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Pem File Pass Phrase"", ""placeholder"": ""Just Leave blank If not encrypted)"", ""type"": ""password"", ""name"": ""passPhrase""}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}], ""customs"": [{""name"": ""OCI Streaming API access"", ""description"": ""Access to the OCI Streaming API through a API Signing Keys is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Oracle%20Cloud%20Infrastructure/Data%20Connectors/Oracle_Cloud_Infrastructure_CCP/OCI_DataConnector_DataConnectorDefinition.json","true" +"OCI_Logs_CL","Oracle Cloud Infrastructure","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Oracle%20Cloud%20Infrastructure","azuresentinel","azure-sentinel-solution-ocilogs","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OracleCloudInfrastructureLogsConnector","Oracle","[DEPRECATED] Oracle Cloud Infrastructure","The Oracle Cloud Infrastructure (OCI) data connector provides the capability to ingest OCI Logs from [OCI Stream](https://docs.oracle.com/iaas/Content/Streaming/Concepts/streamingoverview.htm) into Microsoft Sentinel using the [OCI Streaming REST API](https://docs.oracle.com/iaas/api/#/streaming/streaming/20180418).

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector can go over the 500 column limit of log Analytics. When this happens some logs will be dropped. For this reason the connector can be unrealiable depending on the logs that are being generated and collected.""}, {""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Azure Blob Storage API to pull logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**OCILogs**](https://aka.ms/sentinel-OracleCloudInfrastructureLogsConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Creating Stream**\n\n1. Log in to OCI console and go to *navigation menu* -> *Analytics & AI* -> *Streaming*\n2. Click *Create Stream*\n3. Select Stream Pool or create a new one\n4. Provide the *Stream Name*, *Retention*, *Number of Partitions*, *Total Write Rate*, *Total Read Rate* based on your data amount.\n5. Go to *navigation menu* -> *Logging* -> *Service Connectors*\n6. Click *Create Service Connector*\n6. Provide *Connector Name*, *Description*, *Resource Compartment*\n7. Select Source: Logging\n8. 
Select Target: Streaming\n9. (Optional) Configure *Log Group*, *Filters* or use custom search query to stream only logs that you need.\n10. Configure Target - select the stream created before.\n11. Click *Create*\n\nCheck the documentation to get more information about [Streaming](https://docs.oracle.com/en-us/iaas/Content/Streaming/home.htm) and [Service Connectors](https://docs.oracle.com/en-us/iaas/Content/service-connector-hub/home.htm).""}, {""title"": """", ""description"": ""**STEP 2 - Creating credentials for OCI REST API**\n\nFollow the documentation to [create Private Key and API Key Configuration File.](https://docs.oracle.com/en-us/iaas/Content/API/Concepts/apisigningkey.htm)\n\n>**IMPORTANT:** Save Private Key and API Key Configuration File created during this step as they will be used during deployment step.""}, {""title"": """", ""description"": ""**STEP 3 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the OCI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as OCI API credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the OCI data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-OracleCloudInfrastructureLogsConnector-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key**, **User**, **Key_content**, **Pass_phrase**, **Fingerprint**, **Tenancy**, **Region**, **Message Endpoint**, **Stream Ocid**\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the OCI data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentinel-OracleCloudInfrastructureLogsConnector-functionapp) file. Extract archive to your local development computer..\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAzureSentinelWorkspaceId\n\t\tAzureSentinelSharedKey\n\t\tuser\n\t\tkey_content\n\t\tpass_phrase (Optional)\n\t\tfingerprint\n\t\ttenancy\n\t\tregion\n\t\tMessage Endpoint\n\t\tStreamOcid\n\t\tlogAnalyticsUri (Optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. 
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`.\n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""OCI API Credentials"", ""description"": "" **API Key Configuration File** and **Private Key** are required for OCI API connection. 
See the documentation to learn more about [creating keys for API access](https://docs.oracle.com/en-us/iaas/Content/API/Concepts/apisigningkey.htm)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Oracle%20Cloud%20Infrastructure/Data%20Connectors/OCI_logs_API_FunctionApp.json","true" +"Syslog","OracleDatabaseAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OracleDatabaseAudit","azuresentinel","azure-sentinel-solution-oracledbaudit","2021-11-05","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OracleDatabaseAudit","Oracle","[Deprecated] Oracle Database Audit","The Oracle DB Audit data connector provides the capability to ingest [Oracle Database](https://www.oracle.com/database/technologies/) audit events into Microsoft Sentinel through the syslog. Refer to [documentation](https://docs.oracle.com/en/database/oracle/oracle-database/21/dbseg/introduction-to-auditing.html#GUID-94381464-53A3-421B-8F13-BD171C867405) for more information.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Oracle Database Audit and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OracleDatabaseAudit/Parsers/OracleDatabaseAuditEvent.txt). The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n2. Select **Apply below configuration to my machines** and select the facilities and severities.\n3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Configure Oracle Database Audit events to be sent to Syslog"", ""description"": ""Follow the below instructions \n\n 1. Create the Oracle database [Follow these steps.](https://learn.microsoft.com/en-us/azure/virtual-machines/workloads/oracle/oracle-database-quick-create) \n\n 2. Login to Oracle database created from the above step [Follow these steps.](https://docs.oracle.com/cd/F49540_01/DOC/server.815/a67772/create.htm) \n\n 3. 
Enable unified logging over syslog by **Alter the system to enable unified logging** [Following these steps.](https://docs.oracle.com/en/database/oracle/oracle-database/21/refrn/UNIFIED_AUDIT_COMMON_SYSTEMLOG.html#GUID-9F26BC8E-1397-4B0E-8A08-3B12E4F9ED3A) \n\n 4. Create and **enable an Audit policy for unified auditing** [Follow these steps.](https://docs.oracle.com/en/database/oracle/oracle-database/19/sqlrf/CREATE-AUDIT-POLICY-Unified-Auditing.html#GUID-8D6961FB-2E50-46F5-81F7-9AEA314FC693) \n\n 5. **Enabling syslog and Event Viewer** Captures for the Unified Audit Trail [Follow these steps.](https://docs.oracle.com/en/database/oracle/oracle-database/18/dbseg/administering-the-audit-trail.html#GUID-3EFB75DB-AE1C-44E6-B46E-30E5702B0FC4)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OracleDatabaseAudit/Data%20Connectors/Connector_OracleDatabaseAudit.json","true" +"OracleWebLogicServer_CL","OracleWebLogicServer","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OracleWebLogicServer","azuresentinel","azure-sentinel-solution-oracleweblogicserver","2022-01-06","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","OracleWebLogicServer","Oracle","[Deprecated] Oracle WebLogic Server","OracleWebLogicServer data connector provides the capability to ingest [OracleWebLogicServer](https://docs.oracle.com/en/middleware/standalone/weblogic-server/index.html) events into Microsoft Sentinel. 
Refer to [OracleWebLogicServer documentation](https://docs.oracle.com/en/middleware/standalone/weblogic-server/14.1.1.0/index.html) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias OracleWebLogicServerEvent and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OracleWebLogicServer/Parsers/OracleWebLogicServerEvent.yaml). The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Oracle WebLogic Server where the logs are generated.\n\n> Logs from Oracle WebLogic Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the 
machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the custom log directory to be collected"", ""instructions"": [{""parameters"": {""linkType"": ""OpenCustomLogsSettings""}, ""type"": ""InstallAgent""}]}, {""title"": """", ""description"": ""1. Select the link above to open your workspace advanced settings \n2. From the left pane, select **Data**, select **Custom Logs** and click **Add+**\n3. Click **Browse** to upload a sample of a OracleWebLogicServer log file (e.g. server.log). Then, click **Next >**\n4. Select **New line** as the record delimiter and click **Next >**\n5. Select **Windows** or **Linux** and enter the path to OracleWebLogicServer logs based on your configuration. Example: \n - **Linux** Directory: 'DOMAIN_HOME/servers/server_name/logs/*.log'\n - **Windows** Directory: 'DOMAIN_NAME\\servers\\SERVER_NAME\\logs\\*.log'\n6. After entering the path, click the '+' symbol to apply, then click **Next >** \n7. 
Add **OracleWebLogicServer_CL** as the custom log Name and click **Done**""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/OracleWebLogicServer/Data%20Connectors/Connector_OracleWebLogicServer_agent.json","true" +"OrcaAlerts_CL","Orca Security Alerts","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Orca%20Security%20Alerts","orcasecurityinc1621870991703","orca_security_alerts_mss","2022-05-10","","","Orca Security","Partner","https://orca.security/about/contact/","","domains","OrcaSecurityAlerts","Orca Security","Orca Security Alerts","The Orca Security Alerts connector allows you to easily export Alerts logs to Microsoft Sentinel.","[{""title"": """", ""description"": ""Follow [guidance](https://orcasecurity.zendesk.com/hc/en-us/articles/360043941992-Azure-Sentinel-configuration) for integrating Orca Security Alerts logs with Microsoft Sentinel."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": 
""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Orca%20Security%20Alerts/Data%20Connectors/OrcaSecurityAlerts.json","true" +"","PCI DSS Compliance","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PCI%20DSS%20Compliance","azuresentinel","azure-sentinel-solution-pcidsscompliance","2022-06-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"","PDNS Block Data Connector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PDNS%20Block%20Data%20Connector","azuresentinel","azure-sentinel-solution-pdnsblockdataconnector","2023-03-31","","","Nominet PDNS Support","Partner","https://www.protectivedns.service.ncsc.gov.uk/pdns","","domains","","","","","","","","false" +"CommonSecurityLog","Palo Alto - XDR (Cortex)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20-%20XDR%20%28Cortex%29","","","","","","","","","","","PaloAltoNetworksCortex","Palo Alto Networks","Palo Alto Networks Cortex XDR","The Palo Alto Networks Cortex XDR connector gives you an easy way to connect to your Cortex XDR logs with Microsoft Sentinel. This increases the visibility of your endpoint security. 
It will give you better ability to monitor your resources by creating custom Workbooks, analytics rules, Incident investigation, and evidence gathering.","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Azure Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Azure Sentinel will use as the proxy between your security solution and Azure Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Azure Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Palo Alto Networks (Cortex) logs to Syslog agent"", ""description"": ""\n\n> 1. Go to [Cortex Settings and Configurations](https://inspira.xdr.in.paloaltonetworks.com/configuration/external-alerting) and Click to add New Server under External Applications.\n\n> 2. Then specify the name and Give public IP of your syslog server in Destination. \n\n> 3. 
Give Port number as 514 and from Facility field select FAC_SYSLOG from dropdown. \n\n> 4. Select Protocol as UDP and hit Create.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20-%20XDR%20%28Cortex%29/Data%20Connectors/Connector_PaloAlto_XDR_CEF.json","true" +"PaloAltoCortexXDR_Alerts_CL","Palo Alto Cortex XDR CCP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP","azuresentinel","azure-sentinel-solution-cortexccp","2024-12-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CortexXDRDataConnector","Microsoft","Palo Alto Cortex XDR","The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows ingesting logs from the Palo Alto Cortex XDR API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Palo Alto Cortex XDR API \n Follow the instructions to obtain the credentials. you can also follow this [guide](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/3u3j0e7hcx8t1-get-started-with-cortex-xdr-ap-is) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\n 1.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 1.2. 
In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 1.3. Under [**Integrations**] click on [**API Keys**].\n 1.4. In the [**Settings**] Page click on [**Copy API URL**] in the top right corner.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 2.3. Under [**Integrations**] click on [**API Keys**].\n 2.4. In the [**Settings**] Page click on [**New Key**] in the top right corner.\n 2.5. Choose security level, role, choose Standard and click on [**Generate**]\n 2.6. Copy the API Token, once it generated the [**API Token ID**] can be found under the ID column""}}, {""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api-example.xdr.au.paloaltonetworks.com"", ""type"": ""text"", ""name"": ""apiUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Key ID"", ""placeholder"": ""API ID"", ""type"": ""text"", ""name"": ""apiId""}, ""type"": ""Textbox""}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""apiToken""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true" +"PaloAltoCortexXDR_Audit_Agent_CL","Palo Alto 
Cortex XDR CCP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP","azuresentinel","azure-sentinel-solution-cortexccp","2024-12-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CortexXDRDataConnector","Microsoft","Palo Alto Cortex XDR","The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows ingesting logs from the Palo Alto Cortex XDR API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Palo Alto Cortex XDR API \n Follow the instructions to obtain the credentials. you can also follow this [guide](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/3u3j0e7hcx8t1-get-started-with-cortex-xdr-ap-is) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\n 1.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 1.3. Under [**Integrations**] click on [**API Keys**].\n 1.4. In the [**Settings**] Page click on [**Copy API URL**] in the top right corner.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 2.3. 
Under [**Integrations**] click on [**API Keys**].\n 2.4. In the [**Settings**] Page click on [**New Key**] in the top right corner.\n 2.5. Choose security level, role, choose Standard and click on [**Generate**]\n 2.6. Copy the API Token, once it generated the [**API Token ID**] can be found under the ID column""}}, {""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api-example.xdr.au.paloaltonetworks.com"", ""type"": ""text"", ""name"": ""apiUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Key ID"", ""placeholder"": ""API ID"", ""type"": ""text"", ""name"": ""apiId""}, ""type"": ""Textbox""}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""apiToken""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true" +"PaloAltoCortexXDR_Audit_Management_CL","Palo Alto Cortex XDR CCP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP","azuresentinel","azure-sentinel-solution-cortexccp","2024-12-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CortexXDRDataConnector","Microsoft","Palo Alto Cortex XDR","The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows ingesting logs from the 
Palo Alto Cortex XDR API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Palo Alto Cortex XDR API \n Follow the instructions to obtain the credentials. you can also follow this [guide](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/3u3j0e7hcx8t1-get-started-with-cortex-xdr-ap-is) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\n 1.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 1.3. Under [**Integrations**] click on [**API Keys**].\n 1.4. In the [**Settings**] Page click on [**Copy API URL**] in the top right corner.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 2.3. Under [**Integrations**] click on [**API Keys**].\n 2.4. In the [**Settings**] Page click on [**New Key**] in the top right corner.\n 2.5. Choose security level, role, choose Standard and click on [**Generate**]\n 2.6. 
Copy the API Token, once it generated the [**API Token ID**] can be found under the ID column""}}, {""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api-example.xdr.au.paloaltonetworks.com"", ""type"": ""text"", ""name"": ""apiUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Key ID"", ""placeholder"": ""API ID"", ""type"": ""text"", ""name"": ""apiId""}, ""type"": ""Textbox""}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""apiToken""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true" +"PaloAltoCortexXDR_Endpoints_CL","Palo Alto Cortex XDR CCP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP","azuresentinel","azure-sentinel-solution-cortexccp","2024-12-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CortexXDRDataConnector","Microsoft","Palo Alto Cortex XDR","The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows ingesting logs from the Palo Alto Cortex XDR API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Palo Alto Cortex XDR API \n Follow the instructions to obtain the credentials. you can also follow this [guide](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/3u3j0e7hcx8t1-get-started-with-cortex-xdr-ap-is) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\n 1.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 1.3. Under [**Integrations**] click on [**API Keys**].\n 1.4. In the [**Settings**] Page click on [**Copy API URL**] in the top right corner.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 2.3. Under [**Integrations**] click on [**API Keys**].\n 2.4. In the [**Settings**] Page click on [**New Key**] in the top right corner.\n 2.5. Choose security level, role, choose Standard and click on [**Generate**]\n 2.6. 
Copy the API Token, once it generated the [**API Token ID**] can be found under the ID column""}}, {""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api-example.xdr.au.paloaltonetworks.com"", ""type"": ""text"", ""name"": ""apiUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Key ID"", ""placeholder"": ""API ID"", ""type"": ""text"", ""name"": ""apiId""}, ""type"": ""Textbox""}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""apiToken""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true" +"PaloAltoCortexXDR_Incidents_CL","Palo Alto Cortex XDR CCP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP","azuresentinel","azure-sentinel-solution-cortexccp","2024-12-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","CortexXDRDataConnector","Microsoft","Palo Alto Cortex XDR","The [Palo Alto Cortex XDR](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/09agw06t5dpvw-cortex-xdr-rest-api) data connector allows ingesting logs from the Palo Alto Cortex XDR API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the Palo Alto Cortex XDR API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Palo Alto Cortex XDR API \n Follow the instructions to obtain the credentials. you can also follow this [guide](https://cortex-panw.stoplight.io/docs/cortex-xdr/branches/main/3u3j0e7hcx8t1-get-started-with-cortex-xdr-ap-is) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve API URL\n 1.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 1.3. Under [**Integrations**] click on [**API Keys**].\n 1.4. In the [**Settings**] Page click on [**Copy API URL**] in the top right corner.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the Palo Alto Cortex XDR [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**] -> [**Configurations**] \n 2.3. Under [**Integrations**] click on [**API Keys**].\n 2.4. In the [**Settings**] Page click on [**New Key**] in the top right corner.\n 2.5. Choose security level, role, choose Standard and click on [**Generate**]\n 2.6. 
Copy the API Token, once it generated the [**API Token ID**] can be found under the ID column""}}, {""parameters"": {""label"": ""Base API URL"", ""placeholder"": ""https://api-example.xdr.au.paloaltonetworks.com"", ""type"": ""text"", ""name"": ""apiUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Key ID"", ""placeholder"": ""API ID"", ""type"": ""text"", ""name"": ""apiId""}, ""type"": ""Textbox""}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""apiToken""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20XDR%20CCP/Data%20Connectors/CortexXDR_ccp/DataConnectorDefinition.json","true" +"CortexXpanseAlerts_CL","Palo Alto Cortex Xpanse CCF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20Xpanse%20CCF","azuresentinel","azure-sentinel-solution-cortexxpanse","2024-12-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PaloAltoExpanseCCPDefinition","Microsoft","Palo Alto Cortex Xpanse (via Codeless Connector Framework)","The Palo Alto Cortex Xpanse data connector ingests alerts data into Microsoft Sentinel.","[{""description"": ""To ingest data from Palo Alto Cortex Xpanse to Microsoft Sentinel, click on **Add Domain**. Fill in the required details in the pop-up and click Connect. You will see connected domain endpoints in the grid below. 
To get the Auth ID and API Key, go to **Settings \u2192 Configuration \u2192 Integrations \u2192 API Keys** in the Cortex Xpanse portal and generate new credentials."", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Endpoint"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add domain"", ""title"": ""Add domain"", ""subtitle"": ""Add domain"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Domain Name"", ""placeholder"": ""e.g., example.crtx.us.paloaltonetworks.com"", ""type"": ""text"", ""name"": ""domainName"", ""required"": true, ""description"": ""Enter the domain suffix to be used in the API endpoint, e.g., `example.crtx.us.paloaltonetworks.com`""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""API Key"", ""placeholder"": ""Enter your Palo Alto Xpanse API Key"", ""type"": ""password"", ""name"": ""apiKey"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Xpanse Auth ID"", ""placeholder"": ""Enter your Xpanse Auth ID"", ""type"": ""text"", ""name"": ""xpanseAuthId"", ""required"": true}}]}]}}], ""title"": ""Connect Palo Alto Xpanse to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Cortex%20Xpanse%20CCF/Data%20Connectors/CortexXpanse_ccp/CortexXpanse_ConnectorDefinition.json","true" +"PrismaCloudCompute_CL","Palo Alto Prisma Cloud 
CWPP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Prisma%20Cloud%20CWPP","azuresentinel","azure-sentinel-solution-prismacloudcompute","2022-06-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","PaloAltoPrismaCloudCWPP","Microsoft","Palo Alto Prisma Cloud CWPP (using REST API)","The [Palo Alto Prisma Cloud CWPP](https://prisma.pan.dev/api/cloud/cwpp/audits/#operation/get-audits-incidents) data connector allows you to connect to your Palo Alto Prisma Cloud CWPP instance and ingesting alerts into Microsoft Sentinel. The data connector is built on Microsoft Sentinel's Codeless Connector Platform and uses the Prisma Cloud API to fetch security events and supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security event data into a custom columns so that queries don't need to parse it again, thus resulting in better performance.","[{""description"": ""To enable the Palo Alto Prisma Cloud CWPP Security Events for Microsoft Sentinel, provide the required information below and click on Connect.\n>"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Path to console"", ""placeholder"": ""europe-west3.cloud.twistlock.com/{sasid}"", ""type"": ""text"", ""name"": ""domainname""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Prisma Access Key (API)"", ""placeholder"": ""Prisma Access Key (API)"", ""type"": ""text"", ""name"": ""username""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Secret"", ""placeholder"": ""Secret"", ""type"": ""password"", ""name"": ""password""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Connect Palo Alto Prisma Cloud CWPP Security Events to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", 
""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""PrismaCloudCompute API Key"", ""description"": ""A Palo Alto Prisma Cloud CWPP Monitor API username and password is required. [See the documentation to learn more about PrismaCloudCompute SIEM API](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Prisma%20Cloud%20CWPP/Data%20Connectors/readme.md).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Prisma%20Cloud%20CWPP/Data%20Connectors/PaloAltoPrismaCloudCWPP_ccp/connectorDefinition.json","true" +"PrismaCloudCompute_CL","Palo Alto Prisma Cloud CWPP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Prisma%20Cloud%20CWPP","azuresentinel","azure-sentinel-solution-prismacloudcompute","2022-06-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","PrismaCloudComputeNativePoller","Microsoft","Palo Alto Prisma Cloud CWPP (using REST API)","The [Palo Alto Prisma Cloud CWPP](https://prisma.pan.dev/api/cloud/cwpp/audits/#operation/get-audits-incidents) data connector allows you to connect to your Prisma Cloud CWPP instance and ingesting alerts into Microsoft Sentinel. 
The data connector is built on Microsoft Sentinel’s Codeless Connector Platform and uses the Prisma Cloud API to fetch security events and supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security event data into a custom columns so that queries don't need to parse it again, thus resulting in better performance.","[{""description"": ""To enable the Palo Alto Prisma Cloud CWPP Security Events for Microsoft Sentinel, provide the required information below and click on Connect.\n>"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Path to console"", ""placeholder"": ""https://europe-west3.cloud.twistlock.com/{sasid}"", ""type"": ""text"", ""name"": ""domainname""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Prisma Access Key (API)"", ""placeholder"": ""Prisma Access Key (API)"", ""type"": ""text"", ""name"": ""username""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Secret"", ""placeholder"": ""Secret"", ""type"": ""password"", ""name"": ""password""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Connect Palo Alto Prisma Cloud CWPP Security Events to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""PrismaCloudCompute API Key"", ""description"": ""A Palo Alto Prisma Cloud CWPP Monitor API username and password is required. [See the documentation to learn more about PrismaCloudCompute SIEM API](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Prisma%20Cloud%20CWPP/Data%20Connectors/readme.md).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Palo%20Alto%20Prisma%20Cloud%20CWPP/Data%20Connectors/PrismaCloudCompute_CLV2.json","true" +"CommonSecurityLog","PaloAlto-PAN-OS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAlto-PAN-OS","azuresentinel","azure-sentinel-solution-paloaltopanos","2021-08-09","2021-09-20","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PaloAltoNetworks","Palo Alto Networks","[Deprecated] Palo Alto Networks (Firewall) via Legacy Agent","The Palo Alto Networks firewall connector allows you to easily connect your Palo Alto Networks logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. 
Forward Palo Alto Networks logs to Syslog agent"", ""description"": ""Configure Palo Alto Networks to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent.\n\nGo to [configure Palo Alto Networks NGFW for sending CEF events.](https://aka.ms/sentinel-paloaltonetworks-readme)\n\nGo to [Palo Alto CEF Configuration](https://aka.ms/asi-syslog-paloalto-forwarding) and Palo Alto [Configure Syslog Monitoring](https://aka.ms/asi-syslog-paloalto-configure) steps 2, 3, choose your version, and follow the instructions using the following guidelines:\n\n1. Set the Syslog server format to **BSD**.\n\n2. The copy/paste operations from the PDF might change the text and insert random characters. To avoid this, copy the text to an editor and remove any characters that might break the log format before pasting it.\n\n[Learn more >](https://aka.ms/CEFPaloAlto)""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAlto-PAN-OS/Data%20Connectors/PaloAltoNetworks.json","true" +"CommonSecurityLog","PaloAlto-PAN-OS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAlto-PAN-OS","azuresentinel","azure-sentinel-solution-paloaltopanos","2021-08-09","2021-09-20","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PaloAltoNetworksAma","Palo Alto Networks","[Deprecated] Palo Alto Networks (Firewall) via AMA","The Palo Alto Networks firewall connector allows you to easily connect your Palo Alto Networks logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": """", ""description"": """", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. 
Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward Palo Alto Networks logs to Syslog agent"", ""description"": ""Configure Palo Alto Networks to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent.\n\nGo to [configure Palo Alto Networks NGFW for sending CEF events.](https://aka.ms/sentinel-paloaltonetworks-readme)\n\nGo to [Palo Alto CEF Configuration](https://aka.ms/asi-syslog-paloalto-forwarding) and Palo Alto [Configure Syslog Monitoring](https://aka.ms/asi-syslog-paloalto-configure) steps 2, 3, choose your version, and follow the instructions using the following guidelines:\n\n1. Set the Syslog server format to **BSD**.\n\n2. The copy/paste operations from the PDF might change the text and insert random characters. To avoid this, copy the text to an editor and remove any characters that might break the log format before pasting it.\n\n[Learn more >](https://aka.ms/CEFPaloAlto)"", ""instructions"": []}, {""title"": ""Step C. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAlto-PAN-OS/Data%20Connectors/template_PaloAltoNetworksAMA.json","true" +"CommonSecurityLog","PaloAltoCDL","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoCDL","azuresentinel","azure-sentinel-solution-paloaltocdl","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PaloAltoCDL","Palo Alto Networks","[Deprecated] Palo Alto Networks Cortex Data Lake (CDL) via Legacy Agent","The [Palo Alto Networks CDL](https://www.paloaltonetworks.com/cortex/cortex-data-lake) data connector provides the capability to ingest [CDL logs](https://docs.paloaltonetworks.com/strata-logging-service/log-reference/log-forwarding-schema-overview) into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**PaloAltoCDLEvent**](https://aka.ms/sentinel-paloaltocdl-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Configure Cortex Data Lake to forward logs to a Syslog Server using CEF"", ""description"": ""[Follow the instructions](https://docs.paloaltonetworks.com/cortex/cortex-data-lake/cortex-data-lake-getting-started/get-started-with-log-forwarding-app/forward-logs-from-logging-service-to-syslog-server.html) to configure logs forwarding from Cortex Data Lake to a Syslog Server.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoCDL/Data%20Connectors/Connector_PaloAlto_CDL_CEF.json","true" +"CommonSecurityLog","PaloAltoCDL","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoCDL","azuresentinel","azure-sentinel-solution-paloaltocdl","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PaloAltoCDLAma","Palo Alto Networks","[Deprecated] Palo Alto Networks Cortex Data Lake (CDL) via AMA","The [Palo Alto Networks CDL](https://www.paloaltonetworks.com/cortex/cortex-data-lake) data connector provides the capability to ingest [CDL logs](https://docs.paloaltonetworks.com/strata-logging-service/log-reference/log-forwarding-schema-overview) into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**PaloAltoCDLEvent**](https://aka.ms/sentinel-paloaltocdl-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Configure Cortex Data Lake to forward logs to a Syslog Server using CEF"", ""description"": ""[Follow the instructions](https://docs.paloaltonetworks.com/cortex/cortex-data-lake/cortex-data-lake-getting-started/get-started-with-log-forwarding-app/forward-logs-from-logging-service-to-syslog-server.html) to configure logs forwarding from Cortex Data Lake to a Syslog Server."", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoCDL/Data%20Connectors/template_PaloAlto_CDLAMA.json","true" +"PaloAltoPrismaCloudAlert_CL","PaloAltoPrismaCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud","azuresentinel","azure-sentinel-solution-paloaltoprisma","2021-04-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PaloAltoPrismaCloud","Palo Alto","[DEPRECATED] Palo Alto Prisma Cloud CSPM","The Palo Alto Prisma Cloud CSPM data connector provides the capability to ingest [Prisma Cloud CSPM alerts](https://prisma.pan.dev/api/cloud/cspm/alerts#operation/get-alerts) and [audit logs](https://prisma.pan.dev/api/cloud/cspm/audit-logs#operation/rl-audit-logs) into Microsoft sentinel using the Prisma Cloud CSPM API. Refer to [Prisma Cloud CSPM API documentation](https://prisma.pan.dev/api/cloud/cspm) for more information.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Palo Alto Prisma Cloud REST API to pull logs into Microsoft sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**PaloAltoPrismaCloud**](https://aka.ms/sentinel-PaloAltoPrismaCloud-parser) which is deployed with the Microsoft sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Prisma Cloud**\n\nFollow the documentation to [create Prisma Cloud Access Key](https://docs.paloaltonetworks.com/prisma/prisma-cloud/prisma-cloud-admin/manage-prisma-cloud-administrators/create-access-keys.html) and [obtain Prisma Cloud API Url](https://api.docs.prismacloud.io/reference)\n\n NOTE: Please use SYSTEM ADMIN role for giving access to Prisma Cloud API because only SYSTEM ADMIN role is allowed to View Prisma Cloud Audit Logs. 
Refer to [Prisma Cloud Administrator Permissions (paloaltonetworks.com)](https://docs.paloaltonetworks.com/prisma/prisma-cloud/prisma-cloud-admin/manage-prisma-cloud-administrators/prisma-cloud-admin-permissions) for more details of administrator permissions.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Prisma Cloud data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Prisma Cloud API credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Prisma Cloud data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-PaloAltoPrismaCloud-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Prisma Cloud API Url**, **Prisma Cloud Access Key ID**, **Prisma Cloud Secret Key**, **Microsoft sentinel Workspace Id**, **Microsoft sentinel Shared Key**\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Prisma Cloud data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-PaloAltoPrismaCloud-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tPrismaCloudAPIUrl\n\t\tPrismaCloudAccessKeyID\n\t\tPrismaCloudSecretKey\n\t\tAzureSentinelWorkspaceId\n\t\tAzureSentinelSharedKey\n\t\tlogAnalyticsUri (Optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. \n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Palo Alto Prisma Cloud API Credentials"", ""description"": ""**Prisma Cloud API Url**, **Prisma Cloud Access Key ID**, **Prisma Cloud Secret Key** are required for Prisma Cloud API connection. 
See the documentation to learn more about [creating Prisma Cloud Access Key](https://docs.paloaltonetworks.com/prisma/prisma-cloud/prisma-cloud-admin/manage-prisma-cloud-administrators/create-access-keys.html) and about [obtaining Prisma Cloud API Url](https://prisma.pan.dev/api/cloud/api-urls)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud/Data%20Connectors/PrismaCloud_API_FunctionApp.json","true" +"PaloAltoPrismaCloudAudit_CL","PaloAltoPrismaCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud","azuresentinel","azure-sentinel-solution-paloaltoprisma","2021-04-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PaloAltoPrismaCloud","Palo Alto","[DEPRECATED] Palo Alto Prisma Cloud CSPM","The Palo Alto Prisma Cloud CSPM data connector provides the capability to ingest [Prisma Cloud CSPM alerts](https://prisma.pan.dev/api/cloud/cspm/alerts#operation/get-alerts) and [audit logs](https://prisma.pan.dev/api/cloud/cspm/audit-logs#operation/rl-audit-logs) into Microsoft sentinel using the Prisma Cloud CSPM API. Refer to [Prisma Cloud CSPM API documentation](https://prisma.pan.dev/api/cloud/cspm) for more information.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Palo Alto Prisma Cloud REST API to pull logs into Microsoft sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**PaloAltoPrismaCloud**](https://aka.ms/sentinel-PaloAltoPrismaCloud-parser) which is deployed with the Microsoft sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration of the Prisma Cloud**\n\nFollow the documentation to [create Prisma Cloud Access Key](https://docs.paloaltonetworks.com/prisma/prisma-cloud/prisma-cloud-admin/manage-prisma-cloud-administrators/create-access-keys.html) and [obtain Prisma Cloud API Url](https://api.docs.prismacloud.io/reference)\n\n NOTE: Please use SYSTEM ADMIN role for giving access to Prisma Cloud API because only SYSTEM ADMIN role is allowed to View Prisma Cloud Audit Logs. 
Refer to [Prisma Cloud Administrator Permissions (paloaltonetworks.com)](https://docs.paloaltonetworks.com/prisma/prisma-cloud/prisma-cloud-admin/manage-prisma-cloud-administrators/prisma-cloud-admin-permissions) for more details of administrator permissions.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Prisma Cloud data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Prisma Cloud API credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Prisma Cloud data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-PaloAltoPrismaCloud-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Prisma Cloud API Url**, **Prisma Cloud Access Key ID**, **Prisma Cloud Secret Key**, **Microsoft sentinel Workspace Id**, **Microsoft sentinel Shared Key**\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Prisma Cloud data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-python) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-PaloAltoPrismaCloud-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tPrismaCloudAPIUrl\n\t\tPrismaCloudAccessKeyID\n\t\tPrismaCloudSecretKey\n\t\tAzureSentinelWorkspaceId\n\t\tAzureSentinelSharedKey\n\t\tlogAnalyticsUri (Optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. \n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Palo Alto Prisma Cloud API Credentials"", ""description"": ""**Prisma Cloud API Url**, **Prisma Cloud Access Key ID**, **Prisma Cloud Secret Key** are required for Prisma Cloud API connection. 
See the documentation to learn more about [creating Prisma Cloud Access Key](https://docs.paloaltonetworks.com/prisma/prisma-cloud/prisma-cloud-admin/manage-prisma-cloud-administrators/create-access-keys.html) and about [obtaining Prisma Cloud API Url](https://prisma.pan.dev/api/cloud/api-urls)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud/Data%20Connectors/PrismaCloud_API_FunctionApp.json","true" +"PaloAltoPrismaCloudAlertV2_CL","PaloAltoPrismaCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud","azuresentinel","azure-sentinel-solution-paloaltoprisma","2021-04-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PaloAltoPrismaCloudCSPMCCPDefinition","Microsoft","Palo Alto Prisma Cloud CSPM (via Codeless Connector Framework)","The Palo Alto Prisma Cloud CSPM data connector allows you to connect to your Palo Alto Prisma Cloud CSPM instance and ingesting Alerts (https://pan.dev/prisma-cloud/api/cspm/alerts/) & Audit Logs(https://pan.dev/prisma-cloud/api/cspm/audit-logs/) into Microsoft Sentinel.","[{""description"": ""To get more information on how to obtain the Prisma Cloud Access Key, Secret Key, and Base URL, please refer to the[connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud/Data%20Connectors/Readme.md), provide the required information below and click on Connect.\n>"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Prisma Cloud Access Key"", ""placeholder"": ""Enter Access Key"", ""type"": ""text"", ""name"": ""username""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Prisma Cloud Secret Key"", ""placeholder"": ""Enter Secret Key"", ""type"": ""password"", ""name"": ""password""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Prisma Cloud Base URL"", ""placeholder"": ""https://api2.eu.prismacloud.io"", ""type"": ""text"", ""name"": ""baseurl""}}, 
{""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""PaloAltoPrismaCloudCSPM Api Endpoints"", ""columnValue"": ""properties.request.apiEndpoint""}]}}], ""title"": ""Connect Palo Alto Prisma Cloud CSPM Events to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud/Data%20Connectors/PrismaCloudCSPMLog_CCF/PaloAltoPrismaCloudCSPMLog_ConnectorDefinition.json","true" +"PaloAltoPrismaCloudAuditV2_CL","PaloAltoPrismaCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud","azuresentinel","azure-sentinel-solution-paloaltoprisma","2021-04-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PaloAltoPrismaCloudCSPMCCPDefinition","Microsoft","Palo Alto Prisma Cloud CSPM (via Codeless Connector Framework)","The Palo Alto Prisma Cloud CSPM data connector allows you to connect to your Palo Alto Prisma Cloud CSPM instance and ingesting Alerts (https://pan.dev/prisma-cloud/api/cspm/alerts/) & Audit Logs(https://pan.dev/prisma-cloud/api/cspm/audit-logs/) into Microsoft 
Sentinel.","[{""description"": ""To get more information on how to obtain the Prisma Cloud Access Key, Secret Key, and Base URL, please refer to the [connector tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud/Data%20Connectors/Readme.md), provide the required information below and click on Connect.\n>"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Prisma Cloud Access Key"", ""placeholder"": ""Enter Access Key"", ""type"": ""text"", ""name"": ""username""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Prisma Cloud Secret Key"", ""placeholder"": ""Enter Secret Key"", ""type"": ""password"", ""name"": ""password""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Prisma Cloud Base URL"", ""placeholder"": ""https://api2.eu.prismacloud.io"", ""type"": ""text"", ""name"": ""baseurl""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""PaloAltoPrismaCloudCSPM Api Endpoints"", ""columnValue"": ""properties.request.apiEndpoint""}]}}], ""title"": ""Connect Palo Alto Prisma Cloud CSPM Events to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PaloAltoPrismaCloud/Data%20Connectors/PrismaCloudCSPMLog_CCF/PaloAltoPrismaCloudCSPMLog_ConnectorDefinition.json","true" +"ABAPAuditLog","Pathlock_TDnR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Pathlock_TDnR","pathlockinc1631410274035","pathlock_tdnr","2022-02-17","","","Pathlock Inc.","Partner","https://pathlock.com/support/","","domains,verticals","Pathlock_TDnR","Pathlock Inc.","Pathlock Inc.: Threat Detection and Response for SAP","The [Pathlock Threat Detection and Response (TD&R)](https://pathlock.com/products/cybersecurity-application-controls/) integration with **Microsoft Sentinel Solution for SAP** delivers unified, real-time visibility into SAP security events, enabling organizations to detect and act on threats across all SAP landscapes. This out-of-the-box integration allows Security Operations Centers (SOCs) to correlate SAP-specific alerts with enterprise-wide telemetry, creating actionable intelligence that connects IT security with business processes.

Pathlock’s connector is purpose-built for SAP and forwards only **security-relevant events by default**, minimizing data volume and noise while maintaining the flexibility to forward all log sources when needed. Each event is enriched with **business process context**, allowing Microsoft Sentinel Solution for SAP analytics to distinguish operational patterns from real threats and to prioritize what truly matters.

This precision-driven approach helps security teams drastically reduce false positives, focus investigations, and accelerate **mean time to detect (MTTD)** and **mean time to respond (MTTR)**. Pathlock’s library consists of more than 1,500 SAP-specific detection signatures across 70+ log sources, enabling the solution to uncover complex attack behaviors, configuration weaknesses, and access anomalies.

By combining business-context intelligence with advanced analytics, Pathlock enables enterprises to strengthen detection accuracy, streamline response actions, and maintain continuous control across their SAP environments—without adding complexity or redundant monitoring layers.","[{""title"": ""1. Create ARM Resources and Provide the Required Permissions"", ""description"": ""We will create data collection rule (DCR) and data collection endpoint (DCE) resources. We will also create a Microsoft Entra app registration and assign the required permissions to it."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated deployment of Azure resources\nClicking on \""Deploy push connector resources\"" will trigger the creation of DCR and DCE resources.\nIt will then create a Microsoft Entra app registration with client secret and grant permissions on the DCR. This setup enables data to be sent securely to the DCR using OAuth v2 client credentials.""}}, {""parameters"": {""label"": ""Deploy push connector resources"", ""applicationDisplayName"": ""Pathlock Inc. Threat Detection and Response for SAP""}, ""type"": ""DeployPushConnectorButton_test""}]}, {""title"": ""2. 
Maintain the data collection endpoint details and authentication info in your central instance of Pathlock's Cybersecurity Application Controls: Threat Detection and Response"", ""description"": ""Share the data collection endpoint URL and authentication info with the Pathlock administrator to configure the plug and play forwarding in Threat Detection and Response to send data to the data collection endpoint.\nPlease do not hesitate to contact Pathlock if support is needed.\n\n"", ""instructions"": [{""parameters"": {""label"": ""Use this value to configure as Tenant ID in the LogIngestionAPI credential."", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra Application Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the Application Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Use this value to configure the LogsIngestionURL parameter when deploying the IFlow."", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the DCE URI""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""DCR Immutable ID"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the DCR ID""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rules. Typically requires Azure RBAC Owner or User Access Administrator role.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Pathlock_TDnR/Data%20Connectors/Pathlock_TDnR_PUSH_CCP/Pathlock_TDnR_connectorDefinition.json","true" +"Pathlock_TDnR_CL","Pathlock_TDnR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Pathlock_TDnR","pathlockinc1631410274035","pathlock_tdnr","2022-02-17","","","Pathlock Inc.","Partner","https://pathlock.com/support/","","domains,verticals","Pathlock_TDnR","Pathlock Inc.","Pathlock Threat Detection and Response Integration","Pathlock Threat Detection and Response enables seamless forwarding of security alerts and logs detected and collected by the Pathlock Platform into Microsoft Sentinel Solution for SAP.","[{""title"": ""1. Create ARM Resources and Provide the Required Permissions"", ""description"": ""We will create data collection rule (DCR) and data collection endpoint (DCE) resources. We will also create a Microsoft Entra app registration and assign the required permissions to it."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated deployment of Azure resources\nClicking on \""Deploy push connector resources\"" will trigger the creation of DCR and DCE resources.\nIt will then create a Microsoft Entra app registration with client secret and grant permissions on the DCR. 
This setup enables data to be sent securely to the DCR using OAuth v2 client credentials.""}}, {""parameters"": {""label"": ""Deploy push connector resources"", ""applicationDisplayName"": ""Pathlock Threat Detection and Response forwarding to Microsoft Sentinel Solution for SAP""}, ""type"": ""DeployPushConnectorButton_test""}]}, {""title"": ""2. Maintain the data collection endpoint details and authentication info in Pathlock Threat Detection and Response"", ""description"": ""Share the data collection endpoint URL and authentication info with the Pathlock Threat Detection and Response Integration administrator to configure the Integration."", ""instructions"": [{""parameters"": {""label"": ""Use this value to configure as Tenant ID in the LogIngestionAPI credential."", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra Application Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the Application Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Use this value to configure the LogsIngestionURL parameter."", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the DCE URI""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""DCR Immutable ID"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the DCR ID""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": 
""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rules. Typically requires Azure RBAC Owner or User Access Administrator role.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Pathlock_TDnR/Data%20Connectors/Pathlock_TDnR.json","true" +"Perimeter81_CL","Perimeter 81","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Perimeter%2081","perimeter811605117499319","perimeter_81___mss","2022-05-06","","","Perimeter 81","Partner","https://support.perimeter81.com/docs","","domains","Perimeter81ActivityLogs","Perimeter 81","Perimeter 81 Activity Logs","The Perimeter 81 Activity Logs connector allows you to easily connect your Perimeter 81 activity logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation.","[{""title"": """", ""description"": ""Please note the values below and follow the instructions here to connect your Perimeter 81 activity logs with Microsoft Sentinel."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, 
""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Perimeter%2081/Data%20Connectors/Perimeter81ActivityLogs.json","true" +"Phosphorus_CL","Phosphorus","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Phosphorus","4043","microsoft-sentinel-solution-phosphorus","2024-08-13","2024-08-13","","Phosphorus Inc.","Partner","https://phosphorus.io","","domains","Phosphorus_Polling","Phosphorus Inc.","Phosphorus Devices","The Phosphorus Device Connector provides the capability to Phosphorus to ingest device data logs into Microsoft Sentinel through the Phosphorus REST API. The Connector provides visibility into the devices enrolled in Phosphorus. This Data Connector pulls devices information along with its corresponding alerts.","[{""description"": ""**STEP 1 - Configuration steps for the Phosphorus API**\n\n Follow these instructions to create a Phosphorus API key.\n 1. Log into your Phosphorus instance\n 2. Navigate to Settings -> API \n 3. If the API key has not already been created, press the **Add button** to create the API key\n 4. 
The API key can now be copied and used during the Phosphorus Device connector configuration""}, {""title"": ""Connect the Phosphorus Application with Microsoft Sentinel"", ""description"": ""**STEP 2 - Fill in the details below**\n\n>**IMPORTANT:** Before deploying the Phosphorus Device data connector, have the Phosphorus Instance Domain Name readily available as well as the Phosphorus API Key(s)"", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Domain Name"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{urlPlaceHolder}}"", ""placeHolderValue"": """"}, {""displayText"": ""Integration Name"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{integrationName}}"", ""placeHolderValue"": """"}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true, ""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""REST API Credentials/permissions"", ""description"": ""**Phosphorus API Key** is required. Please make sure that the API Key associated with the User has the Manage Settings permissions enabled.\n\n Follow these instructions to enable Manage Settings permissions.\n 1. Log in to the Phosphorus Application\n 2. Go to 'Settings' -> 'Groups'\n 3. Select the Group the Integration user is a part of\n 4. Navigate to 'Product Actions' -> toggle on the 'Manage Settings' permission. 
""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Phosphorus/Data%20Connectors/PhosphorusDataConnector.json","true" +"CommonSecurityLog","PingFederate","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PingFederate","azuresentinel","azure-sentinel-solution-pingfederate","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PingFederate","Ping Identity","[Deprecated] PingFederate via Legacy Agent","The [PingFederate](https://www.pingidentity.com/en/software/pingfederate.html) data connector provides the capability to ingest [PingFederate events](https://docs.pingidentity.com/bundle/pingfederate-102/page/lly1564002980532.html) into Microsoft Sentinel. Refer to [PingFederate documentation](https://docs.pingidentity.com/bundle/pingfederate-102/page/tle1564002955874.html) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**PingFederateEvent**](https://aka.ms/sentinel-PingFederate-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""[Follow these steps](https://docs.pingidentity.com/bundle/pingfederate-102/page/gsn1564002980953.html) to configure PingFederate sending audit log via syslog in CEF format.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PingFederate/Data%20Connectors/Connector_CEF_PingFederate.json","true" +"CommonSecurityLog","PingFederate","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PingFederate","azuresentinel","azure-sentinel-solution-pingfederate","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PingFederateAma","Ping Identity","[Deprecated] PingFederate via AMA","The [PingFederate](https://www.pingidentity.com/en/software/pingfederate.html) data connector provides the capability to ingest [PingFederate events](https://docs.pingidentity.com/bundle/pingfederate-102/page/lly1564002980532.html) into Microsoft Sentinel. Refer to [PingFederate documentation](https://docs.pingidentity.com/bundle/pingfederate-102/page/tle1564002955874.html) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**PingFederateEvent**](https://aka.ms/sentinel-PingFederate-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""[Follow these steps](https://docs.pingidentity.com/bundle/pingfederate-102/page/gsn1564002980953.html) to configure PingFederate sending audit log via syslog in CEF format."", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PingFederate/Data%20Connectors/template_PingFederateAMA.json","true" +"PingOne_AuditActivitiesV2_CL","PingOne","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PingOne","azuresentinel","azure-sentinel-pingone","2025-04-20","2025-04-20","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PingOneAuditLogsCCPDefinition","Microsoft","Ping One (via Codeless Connector Framework)","This connector ingests **audit activity logs** from the PingOne Identity platform into Microsoft Sentinel using a Codeless Connector Framework.","[{""title"": ""Connect Ping One connector to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""Before connecting to PingOne, ensure the following prerequisites are completed. Refer to the [document](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PingOne/README.md) for detailed setup instructions, including how to obtain client credentials and the environment ID.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Client Credentials \n You'll need client credentials, including your client id and client secret.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Environment Id \n To generate token and gather logs from audit activities endpoint""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Environment ID"", ""columnValue"": ""properties.addOnAttributes.EnvironmentId""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add domain"", ""title"": ""Add domain"", ""subtitle"": ""Add domain"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Client ID"", ""placeholder"": ""Enter ID of the client"", ""type"": ""text"", ""name"": ""clientId"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Client Secret"", ""placeholder"": ""Enter your secret key"", ""type"": ""password"", ""name"": ""clientSecret"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Environment ID"", ""placeholder"": ""Enter your environment Id "", ""type"": ""text"", ""name"": ""environmentId"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Api domain"", ""placeholder"": ""Enter your Api domain Eg.( pingone.com,pingone.eu etc )depending on the region credentials created for "", ""type"": ""text"", ""name"": ""apidomain"", ""required"": true}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": false, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PingOne/Data%20Connectors/PingOneAuditLogs_ccp/PingOneAuditLogs_DataConnectorDefinition.json","true" 
+"PostgreSQL_CL","PostgreSQL","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PostgreSQL","azuresentinel","azure-sentinel-solution-postgresql","2022-06-27","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PostgreSQL","PostgreSQL","[Deprecated] PostgreSQL Events","PostgreSQL data connector provides the capability to ingest [PostgreSQL](https://www.postgresql.org/) events into Microsoft Sentinel. Refer to [PostgreSQL documentation](https://www.postgresql.org/docs/current/index.html) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on PostgreSQL parser based on a Kusto Function to work as expected. This parser is installed along with solution installation."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Tomcat Server where the logs are generated.\n\n> Logs from PostgreSQL Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the 
machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure PostgreSQL to write logs to files"", ""description"": ""1. Edit postgresql.conf file to write logs to files:\n\n>**log_destination** = 'stderr'\n\n>**logging_collector** = on\n\nSet the following parameters: **log_directory** and **log_filename**. Refer to the [PostgreSQL documentation for more details](https://www.postgresql.org/docs/current/runtime-config-logging.html)""}, {""title"": ""3. Configure the logs to be collected"", ""description"": ""Configure the custom log directory to be collected"", ""instructions"": [{""parameters"": {""linkType"": ""OpenCustomLogsSettings""}, ""type"": ""InstallAgent""}]}, {""title"": """", ""description"": ""1. Select the link above to open your workspace advanced settings \n2. From the left pane, select **Settings**, select **Custom Logs** and click **+Add custom log**\n3. Click **Browse** to upload a sample of a PostgreSQL log file. Then, click **Next >**\n4. Select **Timestamp** as the record delimiter and click **Next >**\n5. Select **Windows** or **Linux** and enter the path to PostgreSQL logs based on your configuration(e.g. for some Linux distros the default path is /var/log/postgresql/) \n6. After entering the path, click the '+' symbol to apply, then click **Next >** \n7. 
Add **PostgreSQL** as the custom log Name (the '_CL' suffix will be added automatically) and click **Done**.""}, {""title"": ""Validate connectivity"", ""description"": ""It may take upwards of 20 minutes until your logs start to appear in Microsoft Sentinel.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/PostgreSQL/Data%20Connectors/Connector_PostgreSQL.json","true" +"","Power Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Power%20Platform","","","","","","","","","","","","","","","","","","false" +"prancer_CL","Prancer PenSuiteAI Integration","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Prancer%20PenSuiteAI%20Integration","prancerenterprise1600813133757","microsoft-sentinel-solution-prancer","2023-08-02","","","Prancer PenSuiteAI Integration","Partner","https://www.prancer.io","","domains","PrancerLogData","Prancer","Prancer Data Connector","The Prancer Data Connector provides the capability to ingest Prancer [CSPM](https://docs.prancer.io/web/CSPM/) and [PAC](https://docs.prancer.io/web/PAC/introduction/) data to process through Microsoft Sentinel. 
Refer to [Prancer Documentation](https://docs.prancer.io/web) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Prancer REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""STEP 1: Follow the documentation on the [Prancer Documentation Site](https://docs.prancer.io/web/) in order to set up a scan with an Azure cloud connector.""}, {""title"": """", ""description"": ""STEP 2: Once the scan is created go to the 'Third Party Integrations' menu for the scan and select Sentinel.""}, {""title"": """", ""description"": ""STEP 3: Follow the configuration wizard to select where in Azure the results should be sent to.""}, {""title"": """", ""description"": ""STEP 4: Data should start to get fed into Microsoft Sentinel for processing.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Include custom pre-requisites if the connectivity requires - else delete customs"", ""description"": ""Description for any custom pre-requisite""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Prancer%20PenSuiteAI%20Integration/Data%20Connectors/PrancerLogData.json","true" +"ProofPointTAPClicksBlocked_CL","ProofPointTap","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap","proofpointinc1600438591120","azure-sentinel-proofpoint","2022-05-23","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointTAP","Proofpoint","[Deprecated] Proofpoint TAP","The [Proofpoint Targeted Attack Protection (TAP)](https://www.proofpoint.com/us/products/advanced-threat-protection/targeted-attack-protection) connector provides the capability to ingest Proofpoint TAP logs and events into Microsoft Sentinel. The connector provides visibility into Message and Click events in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Proofpoint TAP to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Proofpoint TAP API**\n\n1. Log into the Proofpoint TAP console \n2. Navigate to **Connect Applications** and select **Service Principal**\n3. Create a **Service Principal** (API Authorization Key)""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Proofpoint TAP connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Proofpoint TAP API Authorization Key(s), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Proofpoint TAP connector.\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelproofpointtapazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelproofpointtapazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password**, and validate the **Uri**.\n> - The default URI is pulling data for the last 300 seconds (5 minutes) to correspond with the default Function App Timer trigger of 5 minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. \n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""This method provides the step-by-step instructions to deploy the Proofpoint TAP connector manually with Azure Function (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentinelproofpointtapazurefunctionzip) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following six (6) application settings individually, with their respective string values (case-sensitive): \n\t\tapiUsername\n\t\tapipassword\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\tlogAnalyticsUri (optional)\n> - Set the `uri` value to: `https://tap-api-v2.proofpoint.com/v2/siem/all?format=json&sinceSeconds=300`\n> - The default URI is pulling data for the last 300 seconds (5 minutes) to correspond with the default Function App Timer trigger of 5 minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly to prevent overlapping data ingestion.\n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`\n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Proofpoint TAP API Key"", ""description"": ""A Proofpoint TAP API username and password is required. 
[See the documentation to learn more about Proofpoint SIEM API](https://help.proofpoint.com/Threat_Insight_Dashboard/API_Documentation/SIEM_API).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap/Data%20Connectors/ProofpointTAP_API_FunctionApp.json","true" +"ProofPointTAPClicksPermitted_CL","ProofPointTap","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap","proofpointinc1600438591120","azure-sentinel-proofpoint","2022-05-23","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointTAP","Proofpoint","[Deprecated] Proofpoint TAP","The [Proofpoint Targeted Attack Protection (TAP)](https://www.proofpoint.com/us/products/advanced-threat-protection/targeted-attack-protection) connector provides the capability to ingest Proofpoint TAP logs and events into Microsoft Sentinel. The connector provides visibility into Message and Click events in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Proofpoint TAP to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Proofpoint TAP API**\n\n1. Log into the Proofpoint TAP console \n2. Navigate to **Connect Applications** and select **Service Principal**\n3. Create a **Service Principal** (API Authorization Key)""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Proofpoint TAP connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Proofpoint TAP API Authorization Key(s), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Proofpoint TAP connector.\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelproofpointtapazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelproofpointtapazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password**, and validate the **Uri**.\n> - The default URI is pulling data for the last 300 seconds (5 minutes) to correspond with the default Function App Timer trigger of 5 minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. \n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""This method provides the step-by-step instructions to deploy the Proofpoint TAP connector manually with Azure Function (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentinelproofpointtapazurefunctionzip) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following six (6) application settings individually, with their respective string values (case-sensitive): \n\t\tapiUsername\n\t\tapipassword\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\tlogAnalyticsUri (optional)\n> - Set the `uri` value to: `https://tap-api-v2.proofpoint.com/v2/siem/all?format=json&sinceSeconds=300`\n> - The default URI is pulling data for the last 300 seconds (5 minutes) to correspond with the default Function App Timer trigger of 5 minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly to prevent overlapping data ingestion.\n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`\n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Proofpoint TAP API Key"", ""description"": ""A Proofpoint TAP API username and password is required. 
[See the documentation to learn more about Proofpoint SIEM API](https://help.proofpoint.com/Threat_Insight_Dashboard/API_Documentation/SIEM_API).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap/Data%20Connectors/ProofpointTAP_API_FunctionApp.json","true" +"ProofPointTAPMessagesBlocked_CL","ProofPointTap","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap","proofpointinc1600438591120","azure-sentinel-proofpoint","2022-05-23","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointTAP","Proofpoint","[Deprecated] Proofpoint TAP","The [Proofpoint Targeted Attack Protection (TAP)](https://www.proofpoint.com/us/products/advanced-threat-protection/targeted-attack-protection) connector provides the capability to ingest Proofpoint TAP logs and events into Microsoft Sentinel. The connector provides visibility into Message and Click events in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Proofpoint TAP to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Proofpoint TAP API**\n\n1. Log into the Proofpoint TAP console \n2. Navigate to **Connect Applications** and select **Service Principal**\n3. Create a **Service Principal** (API Authorization Key)""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Proofpoint TAP connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Proofpoint TAP API Authorization Key(s), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Proofpoint TAP connector.\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelproofpointtapazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelproofpointtapazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password**, and validate the **Uri**.\n> - The default URI is pulling data for the last 300 seconds (5 minutes) to correspond with the default Function App Timer trigger of 5 minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. \n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""This method provides the step-by-step instructions to deploy the Proofpoint TAP connector manually with Azure Function (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentinelproofpointtapazurefunctionzip) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following six (6) application settings individually, with their respective string values (case-sensitive): \n\t\tapiUsername\n\t\tapipassword\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\tlogAnalyticsUri (optional)\n> - Set the `uri` value to: `https://tap-api-v2.proofpoint.com/v2/siem/all?format=json&sinceSeconds=300`\n> - The default URI is pulling data for the last 300 seconds (5 minutes) to correspond with the default Function App Timer trigger of 5 minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly to prevent overlapping data ingestion.\n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`\n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Proofpoint TAP API Key"", ""description"": ""A Proofpoint TAP API username and password is required. 
[See the documentation to learn more about Proofpoint SIEM API](https://help.proofpoint.com/Threat_Insight_Dashboard/API_Documentation/SIEM_API).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap/Data%20Connectors/ProofpointTAP_API_FunctionApp.json","true" +"ProofPointTAPMessagesDelivered_CL","ProofPointTap","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap","proofpointinc1600438591120","azure-sentinel-proofpoint","2022-05-23","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointTAP","Proofpoint","[Deprecated] Proofpoint TAP","The [Proofpoint Targeted Attack Protection (TAP)](https://www.proofpoint.com/us/products/advanced-threat-protection/targeted-attack-protection) connector provides the capability to ingest Proofpoint TAP logs and events into Microsoft Sentinel. The connector provides visibility into Message and Click events in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Proofpoint TAP to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Proofpoint TAP API**\n\n1. Log into the Proofpoint TAP console \n2. Navigate to **Connect Applications** and select **Service Principal**\n3. Create a **Service Principal** (API Authorization Key)""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Proofpoint TAP connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Proofpoint TAP API Authorization Key(s), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Proofpoint TAP connector.\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelproofpointtapazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelproofpointtapazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password**, and validate the **Uri**.\n> - The default URI is pulling data for the last 300 seconds (5 minutes) to correspond with the default Function App Timer trigger of 5 minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. \n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""This method provides the step-by-step instructions to deploy the Proofpoint TAP connector manually with Azure Function (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentinelproofpointtapazurefunctionzip) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. 
After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following six (6) application settings individually, with their respective string values (case-sensitive): \n\t\tapiUsername\n\t\tapipassword\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\tlogAnalyticsUri (optional)\n> - Set the `uri` value to: `https://tap-api-v2.proofpoint.com/v2/siem/all?format=json&sinceSeconds=300`\n> - The default URI is pulling data for the last 300 seconds (5 minutes) to correspond with the default Function App Timer trigger of 5 minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly to prevent overlapping data ingestion.\n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`\n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Proofpoint TAP API Key"", ""description"": ""A Proofpoint TAP API username and password is required. 
[See the documentation to learn more about Proofpoint SIEM API](https://help.proofpoint.com/Threat_Insight_Dashboard/API_Documentation/SIEM_API).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap/Data%20Connectors/ProofpointTAP_API_FunctionApp.json","true" +"ProofPointTAPClicksBlockedV2_CL","ProofPointTap","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap","proofpointinc1600438591120","azure-sentinel-proofpoint","2022-05-23","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointTAPv2","Proofpoint","Proofpoint TAP (via Codeless Connector Platform)","The [Proofpoint Targeted Attack Protection (TAP)](https://www.proofpoint.com/us/products/advanced-threat-protection/targeted-attack-protection) connector provides the capability to ingest Proofpoint TAP logs and events into Microsoft Sentinel. The connector provides visibility into Message and Click events in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","[{""description"": ""**Configuration steps for the Proofpoint TAP API**\n\n1. Log into the [Proofpoint TAP dashboard](https://threatinsight.proofpoint.com/) \n2. Navigate to **Settings** and go to **Connected Applications** tab \n 3. Click on **Create New Credential** \n 4. Provide a name and click **Generate** \n 5. 
Copy **Service Principal** and **Secret** values""}, {""description"": "">**NOTE:** This connector depends on a parser based on Kusto Function to work as expected [**ProofpointTAPEvent**](https://aka.ms/sentinel-ProofpointTAPDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Service Principal"", ""placeholder"": ""123456"", ""type"": ""text"", ""name"": ""username""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Secret"", ""placeholder"": ""123456"", ""type"": ""password"", ""name"": ""password""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""disconnectLabel"": ""Disconnect"", ""name"": ""connectionToggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Proofpoint TAP API Key"", ""description"": ""A Proofpoint TAP API service principal and secret is required to access Proofpoint's SIEM API. 
[See the documentation to learn more about Proofpoint SIEM API](https://help.proofpoint.com/Threat_Insight_Dashboard/API_Documentation/SIEM_API).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap/Data%20Connectors/ProofpointTAP_CCP/ProofpointTAP_defination.json","true" +"ProofPointTAPClicksPermittedV2_CL","ProofPointTap","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap","proofpointinc1600438591120","azure-sentinel-proofpoint","2022-05-23","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointTAPv2","Proofpoint","Proofpoint TAP (via Codeless Connector Platform)","The [Proofpoint Targeted Attack Protection (TAP)](https://www.proofpoint.com/us/products/advanced-threat-protection/targeted-attack-protection) connector provides the capability to ingest Proofpoint TAP logs and events into Microsoft Sentinel. The connector provides visibility into Message and Click events in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","[{""description"": ""**Configuration steps for the Proofpoint TAP API**\n\n1. Log into the [Proofpoint TAP dashboard](https://threatinsight.proofpoint.com/) \n2. Navigate to **Settings** and go to **Connected Applications** tab \n 3. Click on **Create New Credential** \n 4. Provide a name and click **Generate** \n 5. 
Copy **Service Principal** and **Secret** values""}, {""description"": "">**NOTE:** This connector depends on a parser based on Kusto Function to work as expected [**ProofpointTAPEvent**](https://aka.ms/sentinel-ProofpointTAPDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Service Principal"", ""placeholder"": ""123456"", ""type"": ""text"", ""name"": ""username""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Secret"", ""placeholder"": ""123456"", ""type"": ""password"", ""name"": ""password""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""disconnectLabel"": ""Disconnect"", ""name"": ""connectionToggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Proofpoint TAP API Key"", ""description"": ""A Proofpoint TAP API service principal and secret is required to access Proofpoint's SIEM API. 
[See the documentation to learn more about Proofpoint SIEM API](https://help.proofpoint.com/Threat_Insight_Dashboard/API_Documentation/SIEM_API).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap/Data%20Connectors/ProofpointTAP_CCP/ProofpointTAP_defination.json","true" +"ProofPointTAPMessagesBlockedV2_CL","ProofPointTap","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap","proofpointinc1600438591120","azure-sentinel-proofpoint","2022-05-23","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointTAPv2","Proofpoint","Proofpoint TAP (via Codeless Connector Platform)","The [Proofpoint Targeted Attack Protection (TAP)](https://www.proofpoint.com/us/products/advanced-threat-protection/targeted-attack-protection) connector provides the capability to ingest Proofpoint TAP logs and events into Microsoft Sentinel. The connector provides visibility into Message and Click events in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","[{""description"": ""**Configuration steps for the Proofpoint TAP API**\n\n1. Log into the [Proofpoint TAP dashboard](https://threatinsight.proofpoint.com/) \n2. Navigate to **Settings** and go to **Connected Applications** tab \n 3. Click on **Create New Credential** \n 4. Provide a name and click **Generate** \n 5. 
Copy **Service Principal** and **Secret** values""}, {""description"": "">**NOTE:** This connector depends on a parser based on Kusto Function to work as expected [**ProofpointTAPEvent**](https://aka.ms/sentinel-ProofpointTAPDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Service Principal"", ""placeholder"": ""123456"", ""type"": ""text"", ""name"": ""username""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Secret"", ""placeholder"": ""123456"", ""type"": ""password"", ""name"": ""password""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""disconnectLabel"": ""Disconnect"", ""name"": ""connectionToggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Proofpoint TAP API Key"", ""description"": ""A Proofpoint TAP API service principal and secret is required to access Proofpoint's SIEM API. 
[See the documentation to learn more about Proofpoint SIEM API](https://help.proofpoint.com/Threat_Insight_Dashboard/API_Documentation/SIEM_API).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap/Data%20Connectors/ProofpointTAP_CCP/ProofpointTAP_defination.json","true" +"ProofPointTAPMessagesDeliveredV2_CL","ProofPointTap","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap","proofpointinc1600438591120","azure-sentinel-proofpoint","2022-05-23","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointTAPv2","Proofpoint","Proofpoint TAP (via Codeless Connector Platform)","The [Proofpoint Targeted Attack Protection (TAP)](https://www.proofpoint.com/us/products/advanced-threat-protection/targeted-attack-protection) connector provides the capability to ingest Proofpoint TAP logs and events into Microsoft Sentinel. The connector provides visibility into Message and Click events in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","[{""description"": ""**Configuration steps for the Proofpoint TAP API**\n\n1. Log into the [Proofpoint TAP dashboard](https://threatinsight.proofpoint.com/) \n2. Navigate to **Settings** and go to **Connected Applications** tab \n 3. Click on **Create New Credential** \n 4. Provide a name and click **Generate** \n 5. 
Copy **Service Principal** and **Secret** values""}, {""description"": "">**NOTE:** This connector depends on a parser based on Kusto Function to work as expected [**ProofpointTAPEvent**](https://aka.ms/sentinel-ProofpointTAPDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Service Principal"", ""placeholder"": ""123456"", ""type"": ""text"", ""name"": ""username""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Secret"", ""placeholder"": ""123456"", ""type"": ""password"", ""name"": ""password""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""disconnectLabel"": ""Disconnect"", ""name"": ""connectionToggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Proofpoint TAP API Key"", ""description"": ""A Proofpoint TAP API service principal and secret is required to access Proofpoint's SIEM API. 
[See the documentation to learn more about Proofpoint SIEM API](https://help.proofpoint.com/Threat_Insight_Dashboard/API_Documentation/SIEM_API).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ProofPointTap/Data%20Connectors/ProofpointTAP_CCP/ProofpointTAP_defination.json","true" +"ProofpointPODMailLog_CL","Proofpoint On demand(POD) Email Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security","proofpointinc1600438591120","azure-sentinel-proofpointpod","2021-03-31","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointCCPDefinition","Proofpoint","Proofpoint On Demand Email Security (via Codeless Connector Platform)","Proofpoint On Demand Email Security data connector provides the capability to get Proofpoint on Demand Email Protection data, allows users to check message traceability, monitoring into email activity, threats,and data exfiltration by attackers and malicious insiders. The connector provides ability to review events in your org on an accelerated basis, get event log files in hourly increments for recent activity.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Proofpoint POD Websocket API \n #### The PoD Log API does not allow use of the same token for more than one session at the same time, so make sure your token isn't used anywhere. \n Proofpoint Websocket API service requires Remote Syslog Forwarding license. Please refer the [documentation](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API) on how to enable and check PoD Log API. \n You must provide your cluster id and security token.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve the cluster id\n 1.1. Log in to the [proofpoint](https://admin.proofpoint.com/) [**Management Console**] with Admin user credentials\n\n 1.2. 
In the **Management Console**, the cluster id is displayed in the upper-right corner.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve the API token\n 2.1. Log in to the [proofpoint](https://admin.proofpoint.com/) [**Management Console**] with Admin user credentials\n\n 2.2. In the **Management Console**, click **Settings** -> **API Key Management** \n\n 2.3. Under **API Key Management** click on the **PoD Logging** tab.\n\n 2.4. Get or create a new API key.""}}, {""parameters"": {""label"": ""Cluster Id"", ""placeholder"": ""cluster_id"", ""type"": ""text"", ""name"": ""clusterId""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Key"", ""placeholder"": ""API Key"", ""type"": ""text"", ""name"": ""apiKey""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}], ""customs"": [{""name"": ""Websocket API Credentials/permissions"", ""description"": ""**ProofpointClusterID**, and **ProofpointToken** are required. 
[See the documentation to learn more about API](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security/Data%20Connectors/ProofPointEmailSecurity_CCP/ProofpointPOD_Definaton.json","true" +"ProofpointPODMessage_CL","Proofpoint On demand(POD) Email Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security","proofpointinc1600438591120","azure-sentinel-proofpointpod","2021-03-31","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointCCPDefinition","Proofpoint","Proofpoint On Demand Email Security (via Codeless Connector Platform)","Proofpoint On Demand Email Security data connector provides the capability to get Proofpoint on Demand Email Protection data, allows users to check message traceability, monitoring into email activity, threats,and data exfiltration by attackers and malicious insiders. The connector provides ability to review events in your org on an accelerated basis, get event log files in hourly increments for recent activity.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the Proofpoint POD Websocket API \n #### The PoD Log API does not allow use of the same token for more than one session at the same time, so make sure your token isn't used anywhere. \n Proofpoint Websocket API service requires Remote Syslog Forwarding license. Please refer the [documentation](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API) on how to enable and check PoD Log API. \n You must provide your cluster id and security token.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve the cluster id\n 1.1. 
Log in to the [proofpoint](https://admin.proofpoint.com/) [**Management Console**] with Admin user credentials\n\n 1.2. In the **Management Console**, the cluster id is displayed in the upper-right corner.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve the API token\n 2.1. Log in to the [proofpoint](https://admin.proofpoint.com/) [**Management Console**] with Admin user credentials\n\n 2.2. In the **Management Console**, click **Settings** -> **API Key Management** \n\n 2.3. Under **API Key Management** click on the **PoD Logging** tab.\n\n 2.4. Get or create a new API key.""}}, {""parameters"": {""label"": ""Cluster Id"", ""placeholder"": ""cluster_id"", ""type"": ""text"", ""name"": ""clusterId""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Key"", ""placeholder"": ""API Key"", ""type"": ""text"", ""name"": ""apiKey""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}], ""customs"": [{""name"": ""Websocket API Credentials/permissions"", ""description"": ""**ProofpointClusterID**, and **ProofpointToken** are required. 
[See the documentation to learn more about API](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security/Data%20Connectors/ProofPointEmailSecurity_CCP/ProofpointPOD_Definaton.json","true" +"ProofpointPODMessage_CL","Proofpoint On demand(POD) Email Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security","proofpointinc1600438591120","azure-sentinel-proofpointpod","2021-03-31","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointPOD","Proofpoint","[Deprecated] Proofpoint On Demand Email Security","Proofpoint On Demand Email Security data connector provides the capability to get Proofpoint on Demand Email Protection data, allows users to check message traceability, monitoring into email activity, threats,and data exfiltration by attackers and malicious insiders. The connector provides ability to review events in your org on an accelerated basis, get event log files in hourly increments for recent activity.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Proofpoint Websocket API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-proofpointpod-parser) to create the Kusto functions alias, **ProofpointPOD**""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Proofpoint Websocket API**\n\n1. Proofpoint Websocket API service requires Remote Syslog Forwarding license. Please refer the [documentation](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API) on how to enable and check PoD Log API. \n2. 
You must provide your cluster id and security token.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Proofpoint On Demand Email Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Proofpoint POD Log API credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Proofpoint On Demand Email Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-proofpointpod-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ProofpointClusterID**, **ProofpointToken** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Proofpoint On Demand Email Security data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> NOTE: You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-proofpointpod-functionapp) file. 
Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ProofpointXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tProofpointClusterID\n\t\tProofpointToken\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Websocket API Credentials/permissions"", ""description"": ""**ProofpointClusterID**, **ProofpointToken** is required. 
[See the documentation to learn more about API](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security/Data%20Connectors/ProofpointPOD_API_FunctionApp.json","true" +"ProofpointPOD_maillog_CL","Proofpoint On demand(POD) Email Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security","proofpointinc1600438591120","azure-sentinel-proofpointpod","2021-03-31","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointPOD","Proofpoint","[Deprecated] Proofpoint On Demand Email Security","Proofpoint On Demand Email Security data connector provides the capability to get Proofpoint on Demand Email Protection data, allows users to check message traceability, monitoring into email activity, threats,and data exfiltration by attackers and malicious insiders. The connector provides ability to review events in your org on an accelerated basis, get event log files in hourly increments for recent activity.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Proofpoint Websocket API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-proofpointpod-parser) to create the Kusto functions alias, **ProofpointPOD**""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Proofpoint Websocket API**\n\n1. Proofpoint Websocket API service requires Remote Syslog Forwarding license. Please refer the [documentation](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API) on how to enable and check PoD Log API. \n2. 
You must provide your cluster id and security token.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Proofpoint On Demand Email Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Proofpoint POD Log API credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Proofpoint On Demand Email Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-proofpointpod-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ProofpointClusterID**, **ProofpointToken** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Proofpoint On Demand Email Security data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> NOTE: You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-proofpointpod-functionapp) file. 
Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ProofpointXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tProofpointClusterID\n\t\tProofpointToken\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Websocket API Credentials/permissions"", ""description"": ""**ProofpointClusterID**, **ProofpointToken** is required. 
[See the documentation to learn more about API](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security/Data%20Connectors/ProofpointPOD_API_FunctionApp.json","true" +"ProofpointPOD_message_CL","Proofpoint On demand(POD) Email Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security","proofpointinc1600438591120","azure-sentinel-proofpointpod","2021-03-31","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointPOD","Proofpoint","[Deprecated] Proofpoint On Demand Email Security","Proofpoint On Demand Email Security data connector provides the capability to get Proofpoint on Demand Email Protection data, allows users to check message traceability, monitoring into email activity, threats,and data exfiltration by attackers and malicious insiders. The connector provides ability to review events in your org on an accelerated basis, get event log files in hourly increments for recent activity.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Proofpoint Websocket API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-proofpointpod-parser) to create the Kusto functions alias, **ProofpointPOD**""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Proofpoint Websocket API**\n\n1. Proofpoint Websocket API service requires Remote Syslog Forwarding license. Please refer the [documentation](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API) on how to enable and check PoD Log API. \n2. 
You must provide your cluster id and security token.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Proofpoint On Demand Email Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Proofpoint POD Log API credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Proofpoint On Demand Email Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-proofpointpod-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ProofpointClusterID**, **ProofpointToken** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Proofpoint On Demand Email Security data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> NOTE: You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-proofpointpod-functionapp) file. 
Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ProofpointXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tProofpointClusterID\n\t\tProofpointToken\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Websocket API Credentials/permissions"", ""description"": ""**ProofpointClusterID**, **ProofpointToken** is required. 
[See the documentation to learn more about API](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security/Data%20Connectors/ProofpointPOD_API_FunctionApp.json","true" +"maillog_CL","Proofpoint On demand(POD) Email Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security","proofpointinc1600438591120","azure-sentinel-proofpointpod","2021-03-31","","","Proofpoint, Inc.","Partner","https://proofpoint.my.site.com/community/s/","","domains","ProofpointPOD","Proofpoint","[Deprecated] Proofpoint On Demand Email Security","Proofpoint On Demand Email Security data connector provides the capability to get Proofpoint on Demand Email Protection data, allows users to check message traceability, monitoring into email activity, threats,and data exfiltration by attackers and malicious insiders. The connector provides ability to review events in your org on an accelerated basis, get event log files in hourly increments for recent activity.

NOTE: This data connector has been deprecated, consider moving to the CCP data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Proofpoint Websocket API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-proofpointpod-parser) to create the Kusto functions alias, **ProofpointPOD**""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Proofpoint Websocket API**\n\n1. Proofpoint Websocket API service requires Remote Syslog Forwarding license. Please refer the [documentation](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API) on how to enable and check PoD Log API. \n2. 
You must provide your cluster id and security token.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Proofpoint On Demand Email Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Proofpoint POD Log API credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Proofpoint On Demand Email Security data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-proofpointpod-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ProofpointClusterID**, **ProofpointToken** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Proofpoint On Demand Email Security data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> NOTE: You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-proofpointpod-functionapp) file. 
Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ProofpointXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tProofpointClusterID\n\t\tProofpointToken\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Websocket API Credentials/permissions"", ""description"": ""**ProofpointClusterID**, **ProofpointToken** is required. 
[See the documentation to learn more about API](https://proofpointcommunities.force.com/community/s/article/Proofpoint-on-Demand-Pod-Log-API).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Proofpoint%20On%20demand%28POD%29%20Email%20Security/Data%20Connectors/ProofpointPOD_API_FunctionApp.json","true" +"Syslog","Pulse Connect Secure","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Pulse%20Connect%20Secure","azuresentinel","azure-sentinel-solution-pulseconnectsecure","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","PulseConnectSecure","Pulse Secure","[Deprecated] Pulse Connect Secure","The [Pulse Connect Secure](https://www.pulsesecure.net/products/pulse-connect-secure/) connector allows you to easily connect your Pulse Connect Secure logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigations. Integrating Pulse Connect Secure with Microsoft Sentinel provides more insight into your organization's network and improves your security operation capabilities.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Pulse Connect Secure and load the function code or click [here](https://aka.ms/sentinel-PulseConnectSecure-parser), on the second line of the query, enter the hostname(s) of your Pulse Connect Secure device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n 2. Select **Apply below configuration to my machines** and select the facilities and severities.\n 3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Configure and connect the Pulse Connect Secure"", ""description"": ""[Follow the instructions](https://help.ivanti.com/ps/help/en_US/PPS/9.1R13/ag/configuring_an_external_syslog_server.htm) to enable syslog streaming of Pulse Connect Secure logs. 
Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Pulse Connect Secure"", ""description"": ""must be configured to export logs via Syslog""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Pulse%20Connect%20Secure/Data%20Connectors/Connector_Syslog_PulseConnectSecure.json","true" +"","Pure Storage","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Pure%20Storage","purestoragemarketplaceadmin","microsoft-sentinel-solution-purestorage","2024-02-05","","","purestoragemarketplaceadmin","Partner","https://support.purestorage.com","","domains","","","","","","","","false" +"QualysKB_CL","Qualys VM Knowledgebase","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Qualys%20VM%20Knowledgebase","azuresentinel","azure-sentinel-solution-qualysvmknowledgebase","2022-05-17","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","QualysKB","Qualys","Qualys VM KnowledgeBase","The [Qualys Vulnerability Management (VM)](https://www.qualys.com/apps/vulnerability-management/) KnowledgeBase (KB) connector provides the capability to ingest the latest vulnerability data from the Qualys KB into Microsoft Sentinel.

This data can be used to correlate and enrich vulnerability detections found by the [Qualys Vulnerability Management (VM)](https://docs.microsoft.com/azure/sentinel/connect-qualys-vm) data connector.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias QualysVM Knowledgebase and load the function code or click [here](https://aka.ms/sentinel-crowdstrikefalconendpointprotection-parser), on the second line of the query, enter the hostname(s) of your QualysVM Knowledgebase device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected. [Follow the steps](https://aka.ms/sentinel-qualyskb-parser) to use the Kusto function alias, **QualysKB**""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Qualys API**\n\n1. Log into the Qualys Vulnerability Management console with an administrator account, select the **Users** tab and the **Users** subtab. \n2. Click on the **New** drop-down menu and select **Users**.\n3. Create a username and password for the API account. \n4. In the **User Roles** tab, ensure the account role is set to **Manager** and access is allowed to **GUI** and **API**\n4. 
Log out of the administrator account and log into the console with the new API credentials for validation, then log out of the API account. \n5. Log back into the console using an administrator account and modify the API accounts User Roles, removing access to **GUI**. \n6. Save all changes.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Qualys KB connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Qualys API username and password, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Qualys KB connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-qualyskb-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-qualyskb-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password** , update the **URI**, and any additional URI **Filter Parameters** (This value should include a \""&\"" symbol between each parameter and should not include any spaces) \n> - Enter the URI that corresponds to your region. 
The complete list of API Server URLs can be [found here](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf#G4.735348)\n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n - Note: If deployment failed due to the storage account name being taken, change the **Function Name** to a unique value and redeploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""This method provides the step-by-step instructions to deploy the Qualys KB connector manually with Azure Function."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentinel-qualyskb-functioncode) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. 
Add each of the following seven (7) application settings individually, with their respective string values (case-sensitive): \n\t\tapiUsername\n\t\tapiPassword\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\tfilterParameters\n\t\tlogAnalyticsUri (optional)\n> - Enter the URI that corresponds to your region. The complete list of API Server URLs can be [found here](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf#G4.735348). The `uri` value must follow the following schema: `https:///api/2.0` \n> - Add any additional filter parameters, for the `filterParameters` variable, that need to be appended to the URI. The `filterParameter` value should include a \""&\"" symbol between each parameter and should not include any spaces.\n> - Note: If using Azure Key Vault, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n - Use logAnalyticsUri to override the log analytics API endpoint for delegated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n5. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Qualys API Key"", ""description"": ""A Qualys VM API username and password is required. [See the documentation to learn more about Qualys VM API](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Qualys%20VM%20Knowledgebase/Data%20Connectors/QualysKB_API_FunctionApp.json","true" +"QualysHostDetectionV3_CL","QualysVM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/QualysVM","azuresentinel","azure-sentinel-qualysvm","2020-12-14","2025-11-18","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","QualysVMLogsCCPDefinition","Microsoft","Qualys Vulnerability Management (via Codeless Connector Framework)","The [Qualys Vulnerability Management (VM)](https://www.qualys.com/apps/vulnerability-management/) data connector provides the capability to ingest vulnerability host detection data into Microsoft Sentinel through the Qualys API. 
The connector provides visibility into host detection data from vulnerability scans.","[{""title"": ""Connect Qualys Vulnerability Management to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** To gather data for Detections based on Host, expand the **DetectionList** column in the table.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Qualys VM, you need to provide the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. API Credentials \n To gather data from Qualys VM, you'll need Qualys API credentials, including your Username and Password.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. API Server URL \n To gather data from Qualys VM, you'll need the Qualys API server URL specific to your region. You can find the exact API server URL for your region [here](https://www.qualys.com/platform-identification/#api-urls)""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Qualys API User Name"", ""placeholder"": ""Enter UserName"", ""type"": ""text"", ""name"": ""username"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Qualys API Password"", ""placeholder"": ""Enter password"", ""type"": ""password"", ""name"": ""password"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Qualys API Server URL"", ""placeholder"": ""Enter API Server URL"", ""type"": ""text"", ""name"": ""apiServerUrl"", ""required"": true, ""description"": ""Ensure the API Server URL starts with https:// and paste the whole API Server URL without / at the ending""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. Truncation Limit \n Configure the maximum number of host records to retrieve per API call (20-5000 range). 
Higher values may improve performance but could impact API response times.""}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Truncation Limit"", ""name"": ""truncationLimit"", ""options"": [{""key"": ""1000"", ""text"": ""1000 - API default value""}, {""key"": ""20"", ""text"": ""20 - Minimal load, slower collection""}, {""key"": ""100"", ""text"": ""100 - Low load""}, {""key"": ""500"", ""text"": ""500 - Moderate load""}, {""key"": ""2500"", ""text"": ""2500 - High load, faster collection""}, {""key"": ""5000"", ""text"": ""5000 - Maximum load, fastest collection""}], ""placeholder"": ""Select truncation limit"", ""isMultiSelect"": false, ""required"": true}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""toggle""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}], ""customs"": [{""name"": ""API access and roles"", ""description"": ""Ensure the Qualys VM user has a role of Reader or higher. If the role is Reader, ensure that API access is enabled for the account. Auditor role is not supported to access the API. 
For more details, refer to the Qualys VM [Host Detection API](https://docs.qualys.com/en/vm/qweb-all-api/mergedProjects/qapi-assets/host_lists/host_detection.htm#v_3_0) and [User role Comparison](https://qualysguard.qualys.com/qwebhelp/fo_portal/user_accounts/user_roles_comparison_vm.htm) document.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/QualysVM/Data%20Connectors/QualysVMHostLogs_ccp/QualysVMHostLogs_ConnectorDefinition.json","true" +"QualysHostDetectionV2_CL","QualysVM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/QualysVM","azuresentinel","azure-sentinel-qualysvm","2020-12-14","2025-11-18","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","QualysVulnerabilityManagement","Qualys","[DEPRECATED] Qualys Vulnerability Management","The [Qualys Vulnerability Management (VM)](https://www.qualys.com/apps/vulnerability-management/) data connector provides the capability to ingest vulnerability host detection data into Microsoft Sentinel through the Qualys API. The connector provides visibility into host detection data from vulnerability scans. This connector provides Microsoft Sentinel the capability to view dashboards, create custom alerts, and improve investigation

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Qualys VM to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Qualys VM API**\n\n1. Log into the Qualys Vulnerability Management console with an administrator account, select the **Users** tab and the **Users** subtab. \n2. Click on the **New** drop-down menu and select **Users..**\n3. Create a username and password for the API account. \n4. In the **User Roles** tab, ensure the account role is set to **Manager** and access is allowed to **GUI** and **API**\n4. Log out of the administrator account and log into the console with the new API credentials for validation, then log out of the API account. \n5. Log back into the console using an administrator account and modify the API accounts User Roles, removing access to **GUI**. \n6. 
Save all changes.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Qualys VM connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Qualys VM API Authorization Key(s), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated, if you have previously deployed an earlier version, and want to update, please delete the existing Qualys VM Azure Function before redeploying this version. Please use Qualys V2 version Workbook, detections. ""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Qualys VM connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-QualysVM-azuredeployV2) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-QualysVM-azuredeployV2-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password** , update the **URI**, and any additional URI **Filter Parameters** (each filter should be separated by an \""&\"" symbol, no spaces.) \n> - Enter the URI that corresponds to your region. 
The complete list of API Server URLs can be [found here](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf#G4.735348) -- There is no need to add a time suffix to the URI, the Function App will dynamically append the Time Value to the URI in the proper format. \n - The default **Time Interval** is set to pull the last five (5) minutes of data. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. \n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Qualys VM connector manually with Azure Functions.""}, {""title"": """", ""description"": ""**1. Create a Function App**\n\n1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp), and select **+ Add**.\n2. In the **Basics** tab, ensure Runtime stack is set to **Powershell Core**. \n3. In the **Hosting** tab, ensure the **Consumption (Serverless)** plan type is selected.\n4. Make other preferable configuration changes, if needed, then click **Create**.""}, {""title"": """", ""description"": ""**2. Import Function App Code**\n\n1. In the newly created Function App, select **Functions** on the left pane and click **+ New Function**.\n2. Select **Timer Trigger**.\n3. 
Enter a unique Function **Name** and leave the default cron schedule of every 5 minutes, then click **Create**.\n5. Click on **Code + Test** on the left pane. \n6. Copy the [Function App Code](https://aka.ms/sentinel-QualysVM-functioncodeV2) and paste into the Function App `run.ps1` editor.\n7. Click **Save**.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following eight (8) application settings individually, with their respective string values (case-sensitive): \n\t\tapiUsername\n\t\tapiPassword\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\tfilterParameters\n\t\ttimeInterval\n\t\tlogAnalyticsUri (optional)\n> - Enter the URI that corresponds to your region. The complete list of API Server URLs can be [found here](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf#G4.735348). The `uri` value must follow the following schema: `https:///api/2.0/fo/asset/host/vm/detection/?action=list&vm_processed_after=` -- There is no need to add a time suffix to the URI, the Function App will dynamically append the Time Value to the URI in the proper format.\n> - Add any additional filter parameters, for the `filterParameters` variable, that need to be appended to the URI. Each parameter should be separated by an \""&\"" symbol and should not include any spaces.\n> - Set the `timeInterval` (in minutes) to the value of `5` to correspond to the Timer Trigger of every `5` minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly to prevent overlapping data ingestion.\n> - Note: If using Azure Key Vault, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}, {""title"": """", ""description"": ""**4. Configure the host.json**.\n\nDue to the potentially large amount of Qualys host detection data being ingested, it can cause the execution time to surpass the default Function App timeout of five (5) minutes. Increase the default timeout duration to the maximum of ten (10) minutes, under the Consumption Plan, to allow more time for the Function App to execute.\n\n1. In the Function App, select the Function App Name and select the **App Service Editor** blade.\n2. Click **Go** to open the editor, then select the **host.json** file under the **wwwroot** directory.\n3. Add the line `\""functionTimeout\"": \""00:10:00\"",` above the `managedDependancy` line \n4. Ensure **SAVED** appears on the top right corner of the editor, then exit the editor.\n\n> NOTE: If a longer timeout duration is required, consider upgrading to an [App Service Plan](https://docs.microsoft.com/azure/azure-functions/functions-scale#timeout)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Qualys API Key"", ""description"": ""A Qualys VM API username and password is required. [See the documentation to learn more about Qualys VM API](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/QualysVM/Data%20Connectors/QualysVM_API_FunctionApp.json","true" +"QualysHostDetection_CL","QualysVM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/QualysVM","azuresentinel","azure-sentinel-qualysvm","2020-12-14","2025-11-18","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","QualysVulnerabilityManagement","Qualys","[DEPRECATED] Qualys Vulnerability Management","The [Qualys Vulnerability Management (VM)](https://www.qualys.com/apps/vulnerability-management/) data connector provides the capability to ingest vulnerability host detection data into Microsoft Sentinel through the Qualys API. The connector provides visibility into host detection data from vulnerability scans. This connector provides Microsoft Sentinel the capability to view dashboards, create custom alerts, and improve investigation

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Qualys VM to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Qualys VM API**\n\n1. Log into the Qualys Vulnerability Management console with an administrator account, select the **Users** tab and the **Users** subtab. \n2. Click on the **New** drop-down menu and select **Users..**\n3. Create a username and password for the API account. \n4. In the **User Roles** tab, ensure the account role is set to **Manager** and access is allowed to **GUI** and **API**\n4. Log out of the administrator account and log into the console with the new API credentials for validation, then log out of the API account. \n5. Log back into the console using an administrator account and modify the API accounts User Roles, removing access to **GUI**. \n6. 
Save all changes.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Qualys VM connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Qualys VM API Authorization Key(s), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": "">**NOTE:** This connector has been updated, if you have previously deployed an earlier version, and want to update, please delete the existing Qualys VM Azure Function before redeploying this version. Please use Qualys V2 version Workbook, detections. ""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Qualys VM connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-QualysVM-azuredeployV2) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-QualysVM-azuredeployV2-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **API Username**, **API Password** , update the **URI**, and any additional URI **Filter Parameters** (each filter should be separated by an \""&\"" symbol, no spaces.) \n> - Enter the URI that corresponds to your region. 
The complete list of API Server URLs can be [found here](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf#G4.735348) -- There is no need to add a time suffix to the URI, the Function App will dynamically append the Time Value to the URI in the proper format. \n - The default **Time Interval** is set to pull the last five (5) minutes of data. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. \n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Qualys VM connector manually with Azure Functions.""}, {""title"": """", ""description"": ""**1. Create a Function App**\n\n1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp), and select **+ Add**.\n2. In the **Basics** tab, ensure Runtime stack is set to **Powershell Core**. \n3. In the **Hosting** tab, ensure the **Consumption (Serverless)** plan type is selected.\n4. Make other preferable configuration changes, if needed, then click **Create**.""}, {""title"": """", ""description"": ""**2. Import Function App Code**\n\n1. In the newly created Function App, select **Functions** on the left pane and click **+ New Function**.\n2. Select **Timer Trigger**.\n3. 
Enter a unique Function **Name** and leave the default cron schedule of every 5 minutes, then click **Create**.\n5. Click on **Code + Test** on the left pane. \n6. Copy the [Function App Code](https://aka.ms/sentinel-QualysVM-functioncodeV2) and paste into the Function App `run.ps1` editor.\n7. Click **Save**.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following eight (8) application settings individually, with their respective string values (case-sensitive): \n\t\tapiUsername\n\t\tapiPassword\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\tfilterParameters\n\t\ttimeInterval\n\t\tlogAnalyticsUri (optional)\n> - Enter the URI that corresponds to your region. The complete list of API Server URLs can be [found here](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf#G4.735348). The `uri` value must follow the following schema: `https:///api/2.0/fo/asset/host/vm/detection/?action=list&vm_processed_after=` -- There is no need to add a time suffix to the URI, the Function App will dynamically append the Time Value to the URI in the proper format.\n> - Add any additional filter parameters, for the `filterParameters` variable, that need to be appended to the URI. Each parameter should be separated by an \""&\"" symbol and should not include any spaces.\n> - Set the `timeInterval` (in minutes) to the value of `5` to correspond to the Timer Trigger of every `5` minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly to prevent overlapping data ingestion.\n> - Note: If using Azure Key Vault, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details.\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}, {""title"": """", ""description"": ""**4. Configure the host.json**.\n\nDue to the potentially large amount of Qualys host detection data being ingested, it can cause the execution time to surpass the default Function App timeout of five (5) minutes. Increase the default timeout duration to the maximum of ten (10) minutes, under the Consumption Plan, to allow more time for the Function App to execute.\n\n1. In the Function App, select the Function App Name and select the **App Service Editor** blade.\n2. Click **Go** to open the editor, then select the **host.json** file under the **wwwroot** directory.\n3. Add the line `\""functionTimeout\"": \""00:10:00\"",` above the `managedDependancy` line \n4. Ensure **SAVED** appears on the top right corner of the editor, then exit the editor.\n\n> NOTE: If a longer timeout duration is required, consider upgrading to an [App Service Plan](https://docs.microsoft.com/azure/azure-functions/functions-scale#timeout)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Qualys API Key"", ""description"": ""A Qualys VM API username and password is required. [See the documentation to learn more about Qualys VM API](https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/QualysVM/Data%20Connectors/QualysVM_API_FunctionApp.json","true" +"QscoutAppEvents_CL","Quokka","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Quokka","quokka","azure-sentinel-solution-quokka","2025-10-30","","","Quokka","Partner","https://www.quokka.io/contact-us#customer-support","","domains","QscoutAppEventsCCFDefinition","Quokka","QscoutAppEventsConnector","Ingest Qscout application events into Microsoft Sentinel","[{""description"": "">**NOTE:** This connector uses Codeless Connector Framework (CCF) to connect to the Qscout app events feed and ingest data into Microsoft Sentinel""}, {""description"": ""Provide the required values below:\n"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Qscout Organization ID"", ""placeholder"": ""123456"", ""type"": ""text"", ""name"": ""organizationId""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Qscout Organization API Key"", ""placeholder"": ""abcdxyz"", ""type"": ""text"", ""name"": ""apiKey""}}, {""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": 
[{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required"", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true, ""read"": true}}], ""customs"": [{""name"": ""Qscout organization id"", ""description"": ""The API requires your organization ID in Qscout.""}, {""name"": ""Qscout organization API key"", ""description"": ""The API requires your organization API key in Qscout.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Quokka/Data%20Connectors/QuokkaQscoutAppEventsLogs_ccf/QuokkaQscoutAppEventsLogs_connectorDefinition.json","true" +"Syslog","RSA SecurID","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RSA%20SecurID","azuresentinel","azure-sentinel-solution-securid","2021-09-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","RSASecurIDAM","RSA","[Deprecated] RSA® SecurID (Authentication Manager)","The [RSA® SecurID Authentication Manager](https://www.securid.com/) data connector provides the capability to ingest [RSA® SecurID Authentication Manager events](https://community.rsa.com/t5/rsa-authentication-manager/rsa-authentication-manager-log-messages/ta-p/630160) into Microsoft Sentinel. 
Refer to [RSA® SecurID Authentication Manager documentation](https://community.rsa.com/t5/rsa-authentication-manager/getting-started-with-rsa-authentication-manager/ta-p/569582) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**RSASecurIDAMEvent**](https://aka.ms/sentinel-rsasecuridam-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using RSA SecurID Authentication Manager version: 8.4 and 8.5"", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server where the RSA\u00ae SecurID Authentication Manager logs are forwarded.\n\n> Logs from RSA\u00ae SecurID Authentication Manager Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", 
""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure RSA\u00ae SecurID Authentication Manager event forwarding"", ""description"": ""Follow the configuration steps below to get RSA\u00ae SecurID Authentication Manager logs into Microsoft Sentinel.\n1. [Follow these instructions](https://community.rsa.com/t5/rsa-authentication-manager/configure-the-remote-syslog-host-for-real-time-log-monitoring/ta-p/571374) to forward alerts from the Manager to a syslog server.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RSA%20SecurID/Data%20Connectors/RSASecurID.json","true" +"RSAIDPlus_AdminLogs_CL","RSAIDPlus_AdminLogs_Connector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RSAIDPlus_AdminLogs_Connector","rsasecurity1687281258544","azure-sentinel-solution-rsa_id_plus_admin_log","2025-10-14","","","RSA Support Team","Partner","https://community.rsa.com/","","domains,verticals","RSAIDPlus_AdmingLogs_Connector","RSA","RSA ID Plus Admin Logs Connector","The RSA ID Plus AdminLogs Connector provides the capability to ingest [Cloud Admin Console Audit Events](https://community.rsa.com/s/article/Cloud-Administration-Event-Log-API-5d22ba17) into Microsoft Sentinel using Cloud Admin APIs.","[{""description"": "">**NOTE:** This connector uses Codeless Connector Framework 
(CCF) to connect to the RSA ID Plus Cloud Admin APIs to pull logs into Microsoft Sentinel.""}, {""title"": ""**STEP 1** - Create Legacy Admin API Client in Cloud Admin Console."", ""description"": ""Follow steps mentioned in this [page](https://community.rsa.com/s/article/Manage-Legacy-Clients-API-Keys-a89c9cbc#).""}, {""title"": ""**STEP 2** - Generate the Base64URL encoded JWT Token."", ""description"": ""Follow the steps mentioned in this [page](https://community.rsa.com/s/article/Authentication-for-the-Cloud-Administration-APIs-a04e3fb9) under the header 'Legacy Administration API'.""}, {""title"": ""**STEP 3** - Configure the Cloud Admin API to start ingesting Admin event logs into Microsoft Sentinel."", ""description"": ""Provide the required values below:\n"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Admin API URL"", ""placeholder"": ""https://.access.securid.com/AdminInterface/restapi/v1/adminlog/exportLogs"", ""type"": ""text"", ""name"": ""Admin-API-URL""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""JWT Token"", ""placeholder"": ""Enter your JWT Token"", ""type"": ""password"", ""name"": ""access_token""}}]}, {""title"": ""**STEP 4** - Click Connect"", ""description"": ""Verify all the fields above were filled in correctly. 
Press Connect to start the connector."", ""instructions"": [{""type"": ""ConnectionToggleButton"", ""parameters"": {""connectLabel"": ""Connect"", ""name"": ""connect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""RSA ID Plus API Authentication"", ""description"": ""To access the Admin APIs, a valid Base64URL encoded JWT token, signed with the client's Legacy Administration API key is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RSAIDPlus_AdminLogs_Connector/Data%20Connectors/RSIDPlus_AdminLogs_Connector_CCP/RSAIDPlus_AdminLogs_ConnectorDefinition.json","true" +"CommonSecurityLog","Radiflow","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Radiflow","radiflow","azure-sentinel-solution-radiflow","2024-06-26","","","Radiflow","Partner","https://www.radiflow.com","","domains","RadiflowIsid","Radiflow","Radiflow iSID via AMA","iSID enables non-disruptive monitoring of distributed ICS networks for changes in topology and behavior, using multiple security packages, each offering a unique capability pertaining to a specific type of network activity","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**RadiflowEvent**] which is deployed with the Microsoft Sentinel Solution."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. 
Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade.\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule).\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy._\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine."", ""instructions"": []}, {""title"": ""Step B. Configure iSID to send logs using CEF"", ""description"": ""Configure log forwarding using CEF:\n\n1. Navigate to the **System Notifications** section of the Configuration menu.\n\n2. Under Syslog, select **+Add**.\n\n3. In the **New Syslog Server** dialog specify the name, remote server **IP**, **Port**, **Transport** and select **Format** - **CEF**.\n\n4. Press **Apply** to exit the **Add Syslog dialog**."", ""instructions"": []}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python --version\n\n2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Radiflow/Data%20Connectors/RadiflowIsid.json","true" +"NexposeInsightVMCloud_assets_CL","Rapid7InsightVM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Rapid7InsightVM","azuresentinel","azure-sentinel-solution-rapid7insightvm","2021-07-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","InsightVMCloudAPI","Rapid7","Rapid7 Insight Platform Vulnerability Management Reports","The [Rapid7 Insight VM](https://www.rapid7.com/products/insightvm/) Report data connector provides the capability to ingest Scan reports and vulnerability data into Microsoft Sentinel through the REST API from the Rapid7 Insight platform (Managed in the cloud). Refer to [API documentation](https://docs.rapid7.com/insight/api-overview/) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Insight VM API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected [**InsightVMAssets**](https://aka.ms/sentinel-InsightVMAssets-parser) and [**InsightVMVulnerabilities**](https://aka.ms/sentinel-InsightVMVulnerabilities-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Insight VM Cloud**\n\n [Follow the instructions](https://docs.rapid7.com/insight/managing-platform-api-keys/) to obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Rapid7 Insight Vulnerability Management Report data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-InsightVMCloudAPI-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. 
Enter the **InsightVMAPIKey**, choose **InsightVMCloudRegion** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Rapid7 Insight Vulnerability Management Report data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://github.com/averbn/azure_sentinel_data_connectors/raw/main/insight-vm-cloud-azure-sentinel-data-connector/InsightVMCloudAPISentinelConn.zip) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tInsightVMAPIKey\n\t\tInsightVMCloudRegion\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials"", ""description"": ""**InsightVMAPIKey** is required for REST API. [See the documentation to learn more about API](https://docs.rapid7.com/insight/api-overview/). 
Check all [requirements and follow the instructions](https://docs.rapid7.com/insight/managing-platform-api-keys/) for obtaining credentials""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Rapid7InsightVM/Data%20Connectors/InsightVMCloud_API_FunctionApp.json","true" +"NexposeInsightVMCloud_vulnerabilities_CL","Rapid7InsightVM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Rapid7InsightVM","azuresentinel","azure-sentinel-solution-rapid7insightvm","2021-07-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","InsightVMCloudAPI","Rapid7","Rapid7 Insight Platform Vulnerability Management Reports","The [Rapid7 Insight VM](https://www.rapid7.com/products/insightvm/) Report data connector provides the capability to ingest Scan reports and vulnerability data into Microsoft Sentinel through the REST API from the Rapid7 Insight platform (Managed in the cloud). Refer to [API documentation](https://docs.rapid7.com/insight/api-overview/) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Insight VM API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected [**InsightVMAssets**](https://aka.ms/sentinel-InsightVMAssets-parser) and [**InsightVMVulnerabilities**](https://aka.ms/sentinel-InsightVMVulnerabilities-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Insight VM Cloud**\n\n [Follow the instructions](https://docs.rapid7.com/insight/managing-platform-api-keys/) to obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Rapid7 Insight Vulnerability Management Report data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-InsightVMCloudAPI-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. 
Enter the **InsightVMAPIKey**, choose **InsightVMCloudRegion** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Rapid7 Insight Vulnerability Management Report data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://github.com/averbn/azure_sentinel_data_connectors/raw/main/insight-vm-cloud-azure-sentinel-data-connector/InsightVMCloudAPISentinelConn.zip) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tInsightVMAPIKey\n\t\tInsightVMCloudRegion\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials"", ""description"": ""**InsightVMAPIKey** is required for REST API. [See the documentation to learn more about API](https://docs.rapid7.com/insight/api-overview/). 
Check all [requirements and follow the instructions](https://docs.rapid7.com/insight/managing-platform-api-keys/) for obtaining credentials""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Rapid7InsightVM/Data%20Connectors/InsightVMCloud_API_FunctionApp.json","true" +"","Recorded Future","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Recorded%20Future","recordedfuture1605638642586","recorded_future_sentinel_solution","2021-11-01","2023-09-19","","Recorded Future Support Team","Partner","http://support.recordedfuture.com/","","domains","","","","","","","","false" +"","Recorded Future Identity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Recorded%20Future%20Identity","recordedfuture1605638642586","recorded_future_identity_solution","2022-09-06","2025-04-02","","Recorded Future Support Team","Partner","https://support.recordedfuture.com/","","domains","","","","","","","","false" +"RedCanaryDetections_CL","Red Canary","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Red%20Canary","Red Canary","microsoft-sentinel-solution-RedCanary","2022-03-04","2022-03-04","","Red Canary","Partner","https://www.redcanary.com","","domains","RedCanaryDataConnector","Red Canary","Red Canary Threat Detection","The Red Canary data connector provides the capability to ingest published Detections into Microsoft Sentinel using the Data Collector REST API.","[{""title"": """", ""description"": ""Create an Automate Playbook and Trigger as detailed in [this article](https://help.redcanary.com/hc/en-us/articles/4410957523479-Azure-Sentinel). 
You can skip the **Add analysis rule to Microsoft Sentinel** section; this data connector allows you to import the analysis rule directly into your workspace."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Red%20Canary/Data%20Connectors/RedCanaryDataConnector.json","true" +"","ReversingLabs","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ReversingLabs","reversinglabs1597673283347","rl_offer_content_hub_aoae","2022-08-08","2024-07-17","","ReversingLabs","Partner","https://support.reversinglabs.com/hc/en-us","","domains","","","","","","","","false" +"CommonSecurityLog","RidgeSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RidgeSecurity","ridgesecuritytechnologyinc1670890478389","microsoft-sentinel-solution-ridgesecurity","2023-10-23","2023-10-23","","RidgeSecurity","Partner","https://ridgesecurity.ai/about-us/","","domains","RidgeBotDataConnector","RidgeSecurity","[Deprecated] RIDGEBOT - data connector for Microsoft Sentinel","The RidgeBot 
connector lets users connect RidgeBot with Microsoft Sentinel, allowing creation of Dashboards, Workbooks, Notebooks and Alerts.","[{""title"": """", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine""}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Configure the RidgeBot to forward events to syslog server as described here: https://portal.ridgesecurity.ai/downloadurl/89x72912. Generate some attack events for your application.""}, {""title"": ""Step C. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python --version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RidgeSecurity/Data%20Connectors/RidgeSecurity.json","true" +"","RiskIQ","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RiskIQ","azuresentinel","azure-sentinel-solution-riskiq","2021-10-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"Rubrik_Anomaly_Data_CL","RubrikSecurityCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RubrikSecurityCloud","rubrik_inc","rubrik_sentinel","2022-07-19","2025-07-25","","Rubrik","Partner","https://support.rubrik.com","","domains","RubrikSecurityCloudAzureFunctions","Rubrik, Inc","Rubrik Security Cloud data connector","The Rubrik Security Cloud data connector enables security operations teams to integrate insights from Rubrik's Data Observability services into Microsoft Sentinel. The insights include identification of anomalous filesystem behavior associated with ransomware and mass deletion, assess the blast radius of a ransomware attack, and sensitive data operators to prioritize and more rapidly investigate potential incidents.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Rubrik webhook which push its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Rubrik Microsoft Sentinel data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Rubrik connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-RubrikWebhookEvents-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tAnomaliesTableName \n\t\tRansomwareAnalysisTableName \n\t\tThreatHuntsTableName \n\t\tEventsTableName \n\t\tLogLevel \n \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Rubrik Microsoft Sentinel data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-RubrikWebhookEvents-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. RubrikXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tAnomaliesTableName\n\t\tRansomwareAnalysisTableName\n\t\tThreatHuntsTableName\n\t\tEventsTableName\n\t\tLogLevel\n\t\tlogAnalyticsUri (optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: https://.ods.opinsights.azure.us. \n4. Once all application settings have been entered, click **Save**.""}, {""title"": """", ""description"": ""**Post Deployment steps**\n\n""}, {""title"": ""1) Get the Function app endpoint"", ""description"": ""1. Go to Azure function Overview page and Click on **\""Functions\""** tab.\n2. Click on the function called **\""RubrikHttpStarter\""**.\n3. Go to **\""GetFunctionurl\""** and copy the function url.""}, {""title"": ""2) Add a webhook in RubrikSecurityCloud to send data to Microsoft Sentinel."", ""description"": ""Follow the Rubrik User Guide instructions to [Add a Webhook](https://docs.rubrik.com/en-us/saas/saas/common/adding_webhook.html) to begin receiving event information \n 1. Select the Microsoft Sentinel as the webhook Provider \n 2. Enter the desired Webhook name \n 3. Enter the URL part from copied Function-url as the webhook URL endpoint and replace **{functionname}** with **\""RubrikAnomalyOrchestrator\""**, for the Rubrik Microsoft Sentinel Solution \n 4. Select the EventType as Anomaly \n 5. 
Select the following severity levels: Critical, Warning, Informational \n 6. Choose multiple log types, if desired, when running **\""RubrikEventsOrchestrator\""** \n 7. Repeat the same steps to add webhooks for Anomaly Detection Analysis, Threat Hunt and Other Events.\n \n\n NOTE: while adding webhooks for Anomaly Detection Analysis, Threat Hunt and Other Events, replace **{functionname}** with **\""RubrikRansomwareOrchestrator\""**, **\""RubrikThreatHuntOrchestrator\""** and **\""RubrikEventsOrchestrator\""** respectively in copied function-url.""}, {""title"": """", ""description"": ""*Now we are done with the rubrik Webhook configuration. Once the webhook events triggered , you should be able to see the Anomaly, Anomaly Detection Analysis, Threat Hunt events and Other Events from the Rubrik into respective LogAnalytics workspace table called \""Rubrik_Anomaly_Data_CL\"", \""Rubrik_Ransomware_Data_CL\"", \""Rubrik_ThreatHunt_Data_CL\"", and \""Rubrik_Events_Data_CL\"".*\n\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RubrikSecurityCloud/Data%20Connectors/RubrikWebhookEvents/RubrikWebhookEvents_FunctionApp.json","true" +"Rubrik_Events_Data_CL","RubrikSecurityCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RubrikSecurityCloud","rubrik_inc","rubrik_sentinel","2022-07-19","2025-07-25","","Rubrik","Partner","https://support.rubrik.com","","domains","RubrikSecurityCloudAzureFunctions","Rubrik, Inc","Rubrik Security Cloud data connector","The Rubrik Security Cloud data connector enables security operations teams to integrate insights from Rubrik's Data Observability services into Microsoft Sentinel. The insights include identification of anomalous filesystem behavior associated with ransomware and mass deletion, assess the blast radius of a ransomware attack, and sensitive data operators to prioritize and more rapidly investigate potential incidents.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Rubrik webhook which push its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Rubrik Microsoft Sentinel data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Rubrik connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-RubrikWebhookEvents-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tAnomaliesTableName \n\t\tRansomwareAnalysisTableName \n\t\tThreatHuntsTableName \n\t\tEventsTableName \n\t\tLogLevel \n \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Rubrik Microsoft Sentinel data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. 
Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-RubrikWebhookEvents-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. RubrikXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. 
In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tAnomaliesTableName\n\t\tRansomwareAnalysisTableName\n\t\tThreatHuntsTableName\n\t\tEventsTableName\n\t\tLogLevel\n\t\tlogAnalyticsUri (optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: https://.ods.opinsights.azure.us. \n4. Once all application settings have been entered, click **Save**.""}, {""title"": """", ""description"": ""**Post Deployment steps**\n\n""}, {""title"": ""1) Get the Function app endpoint"", ""description"": ""1. Go to Azure function Overview page and Click on **\""Functions\""** tab.\n2. Click on the function called **\""RubrikHttpStarter\""**.\n3. Go to **\""GetFunctionurl\""** and copy the function url.""}, {""title"": ""2) Add a webhook in RubrikSecurityCloud to send data to Microsoft Sentinel."", ""description"": ""Follow the Rubrik User Guide instructions to [Add a Webhook](https://docs.rubrik.com/en-us/saas/saas/common/adding_webhook.html) to begin receiving event information \n 1. Select the Microsoft Sentinel as the webhook Provider \n 2. Enter the desired Webhook name \n 3. Enter the URL part from copied Function-url as the webhook URL endpoint and replace **{functionname}** with **\""RubrikAnomalyOrchestrator\""**, for the Rubrik Microsoft Sentinel Solution \n 4. Select the EventType as Anomaly \n 5. Select the following severity levels: Critical, Warning, Informational \n 6. Choose multiple log types, if desired, when running **\""RubrikEventsOrchestrator\""** \n 7. 
Repeat the same steps to add webhooks for Anomaly Detection Analysis, Threat Hunt and Other Events.\n \n\n NOTE: while adding webhooks for Anomaly Detection Analysis, Threat Hunt and Other Events, replace **{functionname}** with **\""RubrikRansomwareOrchestrator\""**, **\""RubrikThreatHuntOrchestrator\""** and **\""RubrikEventsOrchestrator\""** respectively in copied function-url.""}, {""title"": """", ""description"": ""*Now we are done with the rubrik Webhook configuration. Once the webhook events triggered , you should be able to see the Anomaly, Anomaly Detection Analysis, Threat Hunt events and Other Events from the Rubrik into respective LogAnalytics workspace table called \""Rubrik_Anomaly_Data_CL\"", \""Rubrik_Ransomware_Data_CL\"", \""Rubrik_ThreatHunt_Data_CL\"", and \""Rubrik_Events_Data_CL\"".*\n\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RubrikSecurityCloud/Data%20Connectors/RubrikWebhookEvents/RubrikWebhookEvents_FunctionApp.json","true" +"Rubrik_Ransomware_Data_CL","RubrikSecurityCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RubrikSecurityCloud","rubrik_inc","rubrik_sentinel","2022-07-19","2025-07-25","","Rubrik","Partner","https://support.rubrik.com","","domains","RubrikSecurityCloudAzureFunctions","Rubrik, Inc","Rubrik Security Cloud data connector","The Rubrik Security Cloud data connector enables security operations teams to integrate insights from Rubrik's Data Observability services into Microsoft Sentinel. The insights include identification of anomalous filesystem behavior associated with ransomware and mass deletion, assess the blast radius of a ransomware attack, and sensitive data operators to prioritize and more rapidly investigate potential incidents.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Rubrik webhook which push its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Rubrik Microsoft Sentinel data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Rubrik connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-RubrikWebhookEvents-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tAnomaliesTableName \n\t\tRansomwareAnalysisTableName \n\t\tThreatHuntsTableName \n\t\tEventsTableName \n\t\tLogLevel \n \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Rubrik Microsoft Sentinel data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. 
Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-RubrikWebhookEvents-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. RubrikXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. 
In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tAnomaliesTableName\n\t\tRansomwareAnalysisTableName\n\t\tThreatHuntsTableName\n\t\tEventsTableName\n\t\tLogLevel\n\t\tlogAnalyticsUri (optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: https://.ods.opinsights.azure.us. \n4. Once all application settings have been entered, click **Save**.""}, {""title"": """", ""description"": ""**Post Deployment steps**\n\n""}, {""title"": ""1) Get the Function app endpoint"", ""description"": ""1. Go to Azure function Overview page and Click on **\""Functions\""** tab.\n2. Click on the function called **\""RubrikHttpStarter\""**.\n3. Go to **\""GetFunctionurl\""** and copy the function url.""}, {""title"": ""2) Add a webhook in RubrikSecurityCloud to send data to Microsoft Sentinel."", ""description"": ""Follow the Rubrik User Guide instructions to [Add a Webhook](https://docs.rubrik.com/en-us/saas/saas/common/adding_webhook.html) to begin receiving event information \n 1. Select the Microsoft Sentinel as the webhook Provider \n 2. Enter the desired Webhook name \n 3. Enter the URL part from copied Function-url as the webhook URL endpoint and replace **{functionname}** with **\""RubrikAnomalyOrchestrator\""**, for the Rubrik Microsoft Sentinel Solution \n 4. Select the EventType as Anomaly \n 5. Select the following severity levels: Critical, Warning, Informational \n 6. Choose multiple log types, if desired, when running **\""RubrikEventsOrchestrator\""** \n 7. 
Repeat the same steps to add webhooks for Anomaly Detection Analysis, Threat Hunt and Other Events.\n \n\n NOTE: while adding webhooks for Anomaly Detection Analysis, Threat Hunt and Other Events, replace **{functionname}** with **\""RubrikRansomwareOrchestrator\""**, **\""RubrikThreatHuntOrchestrator\""** and **\""RubrikEventsOrchestrator\""** respectively in copied function-url.""}, {""title"": """", ""description"": ""*Now we are done with the rubrik Webhook configuration. Once the webhook events triggered , you should be able to see the Anomaly, Anomaly Detection Analysis, Threat Hunt events and Other Events from the Rubrik into respective LogAnalytics workspace table called \""Rubrik_Anomaly_Data_CL\"", \""Rubrik_Ransomware_Data_CL\"", \""Rubrik_ThreatHunt_Data_CL\"", and \""Rubrik_Events_Data_CL\"".*\n\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RubrikSecurityCloud/Data%20Connectors/RubrikWebhookEvents/RubrikWebhookEvents_FunctionApp.json","true" +"Rubrik_ThreatHunt_Data_CL","RubrikSecurityCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RubrikSecurityCloud","rubrik_inc","rubrik_sentinel","2022-07-19","2025-07-25","","Rubrik","Partner","https://support.rubrik.com","","domains","RubrikSecurityCloudAzureFunctions","Rubrik, Inc","Rubrik Security Cloud data connector","The Rubrik Security Cloud data connector enables security operations teams to integrate insights from Rubrik's Data Observability services into Microsoft Sentinel. The insights include identification of anomalous filesystem behavior associated with ransomware and mass deletion, assess the blast radius of a ransomware attack, and sensitive data operators to prioritize and more rapidly investigate potential incidents.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Rubrik webhook which push its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Rubrik Microsoft Sentinel data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Rubrik connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-RubrikWebhookEvents-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tAnomaliesTableName \n\t\tRansomwareAnalysisTableName \n\t\tThreatHuntsTableName \n\t\tEventsTableName \n\t\tLogLevel \n \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Rubrik Microsoft Sentinel data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. 
Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-RubrikWebhookEvents-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. RubrikXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. 
In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tAnomaliesTableName\n\t\tRansomwareAnalysisTableName\n\t\tThreatHuntsTableName\n\t\tEventsTableName\n\t\tLogLevel\n\t\tlogAnalyticsUri (optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: https://.ods.opinsights.azure.us. \n4. Once all application settings have been entered, click **Save**.""}, {""title"": """", ""description"": ""**Post Deployment steps**\n\n""}, {""title"": ""1) Get the Function app endpoint"", ""description"": ""1. Go to Azure function Overview page and Click on **\""Functions\""** tab.\n2. Click on the function called **\""RubrikHttpStarter\""**.\n3. Go to **\""GetFunctionurl\""** and copy the function url.""}, {""title"": ""2) Add a webhook in RubrikSecurityCloud to send data to Microsoft Sentinel."", ""description"": ""Follow the Rubrik User Guide instructions to [Add a Webhook](https://docs.rubrik.com/en-us/saas/saas/common/adding_webhook.html) to begin receiving event information \n 1. Select the Microsoft Sentinel as the webhook Provider \n 2. Enter the desired Webhook name \n 3. Enter the URL part from copied Function-url as the webhook URL endpoint and replace **{functionname}** with **\""RubrikAnomalyOrchestrator\""**, for the Rubrik Microsoft Sentinel Solution \n 4. Select the EventType as Anomaly \n 5. Select the following severity levels: Critical, Warning, Informational \n 6. Choose multiple log types, if desired, when running **\""RubrikEventsOrchestrator\""** \n 7. 
Repeat the same steps to add webhooks for Anomaly Detection Analysis, Threat Hunt and Other Events.\n \n\n NOTE: while adding webhooks for Anomaly Detection Analysis, Threat Hunt and Other Events, replace **{functionname}** with **\""RubrikRansomwareOrchestrator\""**, **\""RubrikThreatHuntOrchestrator\""** and **\""RubrikEventsOrchestrator\""** respectively in copied function-url.""}, {""title"": """", ""description"": ""*Now we are done with the rubrik Webhook configuration. Once the webhook events triggered , you should be able to see the Anomaly, Anomaly Detection Analysis, Threat Hunt events and Other Events from the Rubrik into respective LogAnalytics workspace table called \""Rubrik_Anomaly_Data_CL\"", \""Rubrik_Ransomware_Data_CL\"", \""Rubrik_ThreatHunt_Data_CL\"", and \""Rubrik_Events_Data_CL\"".*\n\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/RubrikSecurityCloud/Data%20Connectors/RubrikWebhookEvents/RubrikWebhookEvents_FunctionApp.json","true" +"","SAP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP","","","","","","","","","","","","","","","","","","false" +"SAPBTPAuditLog_CL","SAP BTP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20BTP","sentinel4sap","sap_btp_sentinel_solution","2023-04-04","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","SAPBTPAuditEvents","Microsoft","SAP BTP","SAP Business Technology Platform (SAP BTP) brings together data management, analytics, artificial intelligence, application development, automation, and integration in one, unified environment.","[{""description"": ""**Step 1 - Configuration steps for the SAP BTP Audit Retrieval API**\n\nFollow the steps provided by SAP [see Audit Log Retrieval API for Global Accounts in the Cloud Foundry Environment](https://help.sap.com/docs/btp/sap-business-technology-platform/audit-log-retrieval-api-for-global-accounts-in-cloud-foundry-environment/). Take a note of the **url** (Audit Retrieval API URL), **uaa.url** (User Account and Authentication Server url) and the associated **uaa.clientid**.\n\n>**NOTE:** You can onboard one or more BTP subaccounts by following the steps provided by SAP [see Audit Log Retrieval API Usage for Subaccounts in the Cloud Foundry Environment](https://help.sap.com/docs/btp/sap-business-technology-platform/audit-log-retrieval-api-usage-for-subaccounts-in-cloud-foundry-environment/). 
Add a connection for each subaccount.""}, {""description"": ""Connect using OAuth client credentials"", ""title"": ""Connect events from SAP BTP to Microsoft Sentinel"", ""instructions"": [{""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""label"": ""Add account"", ""isPrimary"": true, ""title"": ""BTP connection"", ""instructionSteps"": [{""title"": ""Account Details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Subaccount name (e.g. Contoso). This will be projected to the InstanceName column."", ""placeholder"": ""no space or special character allowed!"", ""type"": ""text"", ""name"": ""subaccountName""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""SAP BTP Client ID"", ""placeholder"": ""Client ID"", ""type"": ""text"", ""name"": ""clientId""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""SAP BTP Client Secret"", ""placeholder"": ""Client Secret"", ""type"": ""password"", ""name"": ""clientSecret""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Authorization server URL (UAA server)"", ""placeholder"": ""https://your-tenant.authentication.region.hana.ondemand.com"", ""type"": ""text"", ""name"": ""authServerUrl""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Audit Retrieval API URL"", ""placeholder"": ""https://auditlog-management.cfapps.region.hana.ondemand.com"", ""type"": ""text"", ""name"": ""auditHost""}}]}]}}]}, {""title"": ""Subaccounts"", ""description"": ""Each row represents a connected subaccount"", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Subaccount Name"", ""columnValue"": ""name""}], ""menuItems"": [""DeleteConnector""]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", 
""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Client Id and Client Secret for Audit Retrieval API"", ""description"": ""Enable API access in BTP.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20BTP/Data%20Connectors/SAPBTPPollerConnector/SAPBTP_DataConnectorDefinition.json","true" +"SAPETDAlerts_CL","SAP ETD Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20ETD%20Cloud","sap_jasondau","azure-sentinel-solution-sapetd","2025-02-17","2025-09-11","","SAP","Partner","https://help.sap.com/docs/SAP_ENTERPRISE_THREAT_DETECTION_CLOUD_EDITION","","domains","SAPETDAlerts","SAP","SAP Enterprise Threat Detection, cloud edition","The SAP Enterprise Threat Detection, cloud edition (ETD) data connector enables ingestion of security alerts from ETD into Microsoft Sentinel, supporting cross-correlation, alerting, and threat hunting.","[{""description"": ""**Step 1 - Configuration steps for the SAP ETD Audit Retrieval API**\n\nFollow the steps provided by SAP [see ETD docs](https://help.sap.com/docs/ETD/sap-business-technology-platform/audit-log-retrieval-api-for-global-accounts-in-cloud-foundry-environment/). 
Take a note of the **url** (Audit Retrieval API URL), **uaa.url** (User Account and Authentication Server url) and the associated **uaa.clientid**.\n\n>**NOTE:** You can onboard one or more ETD subaccounts by following the steps provided by SAP [see ETD docs](https://help.sap.com/docs/ETD/sap-business-technology-platform/audit-log-retrieval-api-usage-for-subaccounts-in-cloud-foundry-environment/). Add a connection for each subaccount.\n\n>**TIP:** Use the [shared blog series](https://community.sap.com/t5/enterprise-resource-planning-blog-posts-by-sap/sap-enterprise-threat-detection-cloud-edition-joins-forces-with-microsoft/ba-p/13942075) for additional info.""}, {""description"": ""Connect using OAuth client credentials"", ""title"": ""Connect events from SAP ETD to Microsoft Sentinel"", ""instructions"": [{""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""label"": ""Add account"", ""isPrimary"": true, ""title"": ""ETD connection"", ""instructionSteps"": [{""title"": ""Account Details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""SAP ETD Client ID"", ""placeholder"": ""Client ID"", ""type"": ""text"", ""name"": ""clientId""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""SAP ETD Client Secret"", ""placeholder"": ""Client Secret"", ""type"": ""password"", ""name"": ""clientSecret""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Authorization server URL (UAA server)"", ""placeholder"": ""https://your-tenant.authentication.region.hana.ondemand.com/oauth/token"", ""type"": ""text"", ""name"": ""authServerUrl""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""SAP ETD data retrieval API URL"", ""placeholder"": ""https://your-etd-cloud-data-retrieval-service.cfapps.region.hana.ondemand.com"", ""type"": ""text"", ""name"": ""etdHost""}}]}]}}]}, {""title"": ""ETD accounts"", ""description"": ""Each row represents a connected ETD account"", ""instructions"": [{""type"": 
""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Data retrieval endpoint"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Client Id and Client Secret for ETD Retrieval API"", ""description"": ""Enable API access in ETD.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20ETD%20Cloud/Data%20Connectors/SAPETD_PUSH_CCP/SAPETD_connectorDefinition.json","true" +"SAPETDInvestigations_CL","SAP ETD Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20ETD%20Cloud","sap_jasondau","azure-sentinel-solution-sapetd","2025-02-17","2025-09-11","","SAP","Partner","https://help.sap.com/docs/SAP_ENTERPRISE_THREAT_DETECTION_CLOUD_EDITION","","domains","SAPETDAlerts","SAP","SAP Enterprise Threat Detection, cloud edition","The SAP Enterprise Threat Detection, cloud edition (ETD) data connector enables ingestion of security alerts from ETD into Microsoft Sentinel, supporting cross-correlation, alerting, and threat hunting.","[{""description"": ""**Step 1 - Configuration steps for the SAP ETD Audit Retrieval API**\n\nFollow the steps provided by SAP [see ETD 
docs](https://help.sap.com/docs/ETD/sap-business-technology-platform/audit-log-retrieval-api-for-global-accounts-in-cloud-foundry-environment/). Take a note of the **url** (Audit Retrieval API URL), **uaa.url** (User Account and Authentication Server url) and the associated **uaa.clientid**.\n\n>**NOTE:** You can onboard one or more ETD subaccounts by following the steps provided by SAP [see ETD docs](https://help.sap.com/docs/ETD/sap-business-technology-platform/audit-log-retrieval-api-usage-for-subaccounts-in-cloud-foundry-environment/). Add a connection for each subaccount.\n\n>**TIP:** Use the [shared blog series](https://community.sap.com/t5/enterprise-resource-planning-blog-posts-by-sap/sap-enterprise-threat-detection-cloud-edition-joins-forces-with-microsoft/ba-p/13942075) for additional info.""}, {""description"": ""Connect using OAuth client credentials"", ""title"": ""Connect events from SAP ETD to Microsoft Sentinel"", ""instructions"": [{""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""label"": ""Add account"", ""isPrimary"": true, ""title"": ""ETD connection"", ""instructionSteps"": [{""title"": ""Account Details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""SAP ETD Client ID"", ""placeholder"": ""Client ID"", ""type"": ""text"", ""name"": ""clientId""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""SAP ETD Client Secret"", ""placeholder"": ""Client Secret"", ""type"": ""password"", ""name"": ""clientSecret""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Authorization server URL (UAA server)"", ""placeholder"": ""https://your-tenant.authentication.region.hana.ondemand.com/oauth/token"", ""type"": ""text"", ""name"": ""authServerUrl""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""SAP ETD data retrieval API URL"", ""placeholder"": ""https://your-etd-cloud-data-retrieval-service.cfapps.region.hana.ondemand.com"", ""type"": ""text"", ""name"": 
""etdHost""}}]}]}}]}, {""title"": ""ETD accounts"", ""description"": ""Each row represents a connected ETD account"", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Data retrieval endpoint"", ""columnValue"": ""properties.request.apiEndpoint""}], ""menuItems"": [""DeleteConnector""]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Client Id and Client Secret for ETD Retrieval API"", ""description"": ""Enable API access in ETD.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20ETD%20Cloud/Data%20Connectors/SAPETD_PUSH_CCP/SAPETD_connectorDefinition.json","true" +"SAPLogServ_CL","SAP LogServ","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20LogServ","sap_jasondau","azure-sentinel-solution-saplogserv","2025-02-17","2025-07-18","","SAP","Partner","https://community.sap.com/t5/enterprise-resource-planning-blogs-by-sap/announcing-limited-preview-of-sap-logserv-integration-with-microsoft/ba-p/13942180","","domains","SAPLogServ","SAP SE","SAP LogServ (RISE), S/4HANA Cloud private edition","SAP LogServ is an SAP Enterprise Cloud Services (ECS) service aimed at collection, storage, forwarding and access of logs. 
LogServ centralizes the logs from all systems, applications, and ECS services used by a registered customer.
Main Features include:
Near Realtime Log Collection: With the ability to integrate into Microsoft Sentinel as a SIEM solution.
LogServ complements the existing SAP application layer threat monitoring and detections in Microsoft Sentinel with the log types owned by SAP ECS as the system provider. This includes logs like: SAP Security Audit Log (AS ABAP), HANA database, AS JAVA, ICM, SAP Web Dispatcher, SAP Cloud Connector, OS, SAP Gateway, 3rd party Database, Network, DNS, Proxy, Firewall","[{""title"": ""1. Create ARM Resources and Provide the Required Permissions"", ""description"": ""We will create data collection rule (DCR) and data collection endpoint (DCE) resources. We will also create a Microsoft Entra app registration and assign the required permissions to it."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated deployment of Azure resources\nClicking on \""Deploy push connector resources\"" will trigger the creation of DCR and DCE resources.\nIt will then create a Microsoft Entra app registration with client secret and grant permissions on the DCR. This setup enables data to be sent securely to the DCR using a OAuth v2 client credentials.""}}, {""parameters"": {""label"": ""Deploy push connector resources"", ""applicationDisplayName"": ""SAP LogServ push to Microsoft Sentinel""}, ""type"": ""DeployPushConnectorButton_test""}]}, {""title"": ""2. 
Maintain the data collection endpoint details and authentication info in SAP LogServ"", ""description"": ""Share the data collection endpoint URL and authentication info with the SAP LogServ administrator to configure the SAP LogServ to send data to the data collection endpoint.\n\nLearn more from [this blog series](https://community.sap.com/t5/enterprise-resource-planning-blog-posts-by-members/ultimate-blog-series-sap-logserv-integration-with-microsoft-sentinel/ba-p/14126401)."", ""instructions"": [{""parameters"": {""label"": ""Use this value to configure as Tenant ID in the LogIngestionAPI credential."", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra Application Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the Application Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Use this value to configure the LogsIngestionURL parameter when deploying the IFlow."", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the DCE URI""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""DCR Immutable ID"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the DCR ID""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rules. Typically requires Azure RBAC Owner or User Access Administrator role.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20LogServ/Data%20Connectors/SAPLogServ.json;https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20LogServ/Data%20Connectors/SAPLogServ_PUSH_CCP/SAPLogServ_connectorDefinition.json","false" +"ABAPAuditLog","SAP S4 Cloud Public Edition","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20S4%20Cloud%20Public%20Edition","sap_jasondau","azure-sentinel-solution-s4hana-public","2025-09-12","","","SAP","Partner","https://api.sap.com/api/SecurityAuditLog_ODataService/overview","","domains","SAPS4PublicAlerts","SAP","SAP S/4HANA Cloud Public Edition","The SAP S/4HANA Cloud Public Edition (GROW with SAP) data connector enables ingestion of SAP's security audit log into the Microsoft Sentinel Solution for SAP, supporting cross-correlation, alerting, and threat hunting. Looking for alternative authentication mechanisms? See [here](https://github.com/Azure-Samples/Sentinel-For-SAP-Community/tree/main/integration-artifacts).","[{""description"": ""**Step 1 - Configuration steps for SAP S/4HANA Cloud Public Edition**\n\nTo connect to SAP S/4HANA Cloud Public Edition, you will need:\n\n1. 
Configure a communication arrangement for communication scenario **[SAP_COM_0750](https://help.sap.com/docs/SAP_S4HANA_CLOUD/0f69f8fb28ac4bf48d2b57b9637e81fa/a93dca70e2ce43d19ac93e3e5531e37d.html)** \n\n2. SAP S/4HANA Cloud Public Edition tenant **API URL**\n3. Valid **communication user (username and password)** for your SAP S/4HANA Cloud system\n4. **Appropriate authorizations** to access audit log data via OData services\n\n>**NOTE:** This connector supports Basic authentication. Looking for alternative authentication mechanisms? See [here](https://github.com/Azure-Samples/Sentinel-For-SAP-Community/tree/main/integration-artifacts)""}, {""description"": ""Connect using Basic authentication"", ""title"": ""Connect events from SAP S/4HANA Cloud Public Edition to Microsoft Sentinel Solution for SAP"", ""instructions"": [{""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""label"": ""Add account"", ""isPrimary"": true, ""title"": ""S/4HANA Cloud Public Edition connection"", ""instructionSteps"": [{""title"": ""Account Details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Username"", ""placeholder"": ""Enter your SAP S/4HANA Cloud username"", ""type"": ""text"", ""name"": ""username""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Password"", ""placeholder"": ""Enter your SAP S/4HANA Cloud password"", ""type"": ""password"", ""name"": ""password""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""SAP S/4HANA Cloud API URL"", ""placeholder"": ""https://my123456-api.s4hana.cloud.sap"", ""type"": ""text"", ""name"": ""s4hanaHost""}}]}]}}]}, {""title"": ""S/4HANA Cloud Public Edition connections"", ""description"": ""Each row represents a connected S/4HANA Cloud Public Edition system"", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""S/4HANA Cloud API endpoint"", ""columnValue"": ""properties.request.apiEndpoint""}], 
""menuItems"": [""DeleteConnector""]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Client Id and Client Secret for Audit Retrieval API"", ""description"": ""Enable API access in BTP.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SAP%20S4%20Cloud%20Public%20Edition/Data%20Connectors/SAPS4PublicPollerConnector/SAPS4Public_connectorDefinition.json","true" +"SIGNL4_CL","SIGNL4","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SIGNL4","derdack","azure-sentinel-solution-signl4","2021-12-10","2021-12-10","","Derdack","Partner","https://www.signl4.com","","domains","DerdackSIGNL4","Derdack","Derdack SIGNL4","When critical systems fail or security incidents happen, SIGNL4 bridges the ‘last mile’ to your staff, engineers, IT admins and workers in the field. It adds real-time mobile alerting to your services, systems, and processes in no time. SIGNL4 notifies through persistent mobile push, SMS text and voice calls with acknowledgement, tracking and escalation. Integrated duty and shift scheduling ensure the right people are alerted at the right time.

[Learn more >](https://www.signl4.com)","[{""title"": """", ""description"": "">**NOTE:** This data connector is mainly configured on the SIGNL4 side. You can find a description video here: [**Integrate SIGNL4 with Microsoft Sentinel**](https://www.signl4.com/blog/portfolio_item/azure-sentinel-mobile-alert-notification-duty-schedule-escalation/)."", ""instructions"": []}, {""title"": """", ""description"": "">**SIGNL4 Connector:** The SIGNL4 connector for Microsoft Sentinel, Azure Security Center and other Azure Graph Security API providers provides seamless 2-way integration with your Azure Security solutions. Once added to your SIGNL4 team, the connector will read security alerts from Azure Graph Security API and fully automatically and trigger alert notifications to your team members on duty. It will also synchronize the alert status from SIGNL4 to Graph Security API, so that if alerts are acknowledged or closed, this status is also updated on the according Azure Graph Security API alert or the corresponding security provider. As mentioned, the connector mainly uses Azure Graph Security API, but for some security providers, such as Microsoft Sentinel, it also uses dedicated REST APIs from according Azure solutions."", ""instructions"": []}, {""title"": ""Microsoft Sentinel Features"", ""description"": ""Microsoft Sentinel is a cloud native SIEM solution from Microsoft and a security alert provider in Azure Graph Security API. However, the level of alert details available with the Graph Security API is limited for Microsoft Sentinel. The connector can therefore augment alerts with further details (insights rule search results), from the underlying Microsoft Sentinel Log Analytics workspace. To be able to do that, the connector communicates with Azure Log Analytics REST API and needs according permissions (see below). Furthermore, the app can also update the status of Microsoft Sentinel incidents, when all related security alerts are e.g. in progress or resolved. 
In order to be able to do that, the connector needs to be a member of the 'Microsoft Sentinel Contributors' group in your Azure Subscription.\n **Automated deployment in Azure**\n The credentials required to access the beforementioned APIs, are generated by a small PowerShell script that you can download below. The script performs the following tasks for you:\n - Logs you on to your Azure Subscription (please login with an administrator account)\n - Creates a new enterprise application for this connector in your Azure AD, also referred to as service principal\n - Creates a new role in your Azure IAM that grants read/query permission to only Azure Log Analytics workspaces.\n - Joins the enterprise application to that user role\n - Joins the enterprise application to the 'Microsoft Sentinel Contributors' role\n - Outputs some data that you need to configure app (see below)"", ""instructions"": []}, {""title"": ""Deployment procedure"", ""description"": ""1. Download the PowerShell deployment script from [here](https://github.com/signl4/signl4-integration-azuresentinel/blob/master/registerSIGNL4Client.ps1).\n2. Review the script and the roles and permission scopes it deploys for the new app registration. If you don't want to use the connector with Microsoft Sentinel, you could remove all role creation and role assignment code and only use it to create the app registration (SPN) in your Azure Active Directory.\n3. Run the script. At the end it outputs information that you need to enter in the connector app configuration.\n4. In Azure AD, click on 'App Registrations'. Find the app with the name 'SIGNL4AzureSecurity' and open its details\n5. On the left menu blade click 'API Permissions'. Then click 'Add a permission'.\n6. On the blade that loads, under 'Microsoft APIs' click on the 'Microsoft Graph' tile, then click 'App permission'.\n7. In the table that is displayed expand 'SecurityEvents' and check 'SecurityEvents.Read.All' and 'SecurityEvents.ReadWrite.All'.\n8. 
Click 'Add permissions'."", ""instructions"": []}, {""title"": ""Configuring the SIGNL4 connector app"", ""description"": ""Finally, enter the IDs, that the script has outputted in the connector configuration:\n - Azure Tenant ID\n - Azure Subscription ID\n - Client ID (of the enterprise application)\n - Client Secret (of the enterprise application)\n Once the app is enabled, it will start reading your Azure Graph Security API alerts.\n\n>**NOTE:** It will initially only read the alerts that have occurred within the last 24 hours."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SIGNL4/Data%20Connectors/DerdackSIGNL4.json","true" +"SecurityIncident","SIGNL4","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SIGNL4","derdack","azure-sentinel-solution-signl4","2021-12-10","2021-12-10","","Derdack","Partner","https://www.signl4.com","","domains","DerdackSIGNL4","Derdack","Derdack SIGNL4","When critical systems fail or security incidents happen, SIGNL4 bridges the ‘last mile’ to your staff, engineers, IT admins and workers in the field. 
It adds real-time mobile alerting to your services, systems, and processes in no time. SIGNL4 notifies through persistent mobile push, SMS text and voice calls with acknowledgement, tracking and escalation. Integrated duty and shift scheduling ensure the right people are alerted at the right time.

[Learn more >](https://www.signl4.com)","[{""title"": """", ""description"": "">**NOTE:** This data connector is mainly configured on the SIGNL4 side. You can find a description video here: [**Integrate SIGNL4 with Microsoft Sentinel**](https://www.signl4.com/blog/portfolio_item/azure-sentinel-mobile-alert-notification-duty-schedule-escalation/)."", ""instructions"": []}, {""title"": """", ""description"": "">**SIGNL4 Connector:** The SIGNL4 connector for Microsoft Sentinel, Azure Security Center and other Azure Graph Security API providers provides seamless 2-way integration with your Azure Security solutions. Once added to your SIGNL4 team, the connector will read security alerts from Azure Graph Security API and fully automatically and trigger alert notifications to your team members on duty. It will also synchronize the alert status from SIGNL4 to Graph Security API, so that if alerts are acknowledged or closed, this status is also updated on the according Azure Graph Security API alert or the corresponding security provider. As mentioned, the connector mainly uses Azure Graph Security API, but for some security providers, such as Microsoft Sentinel, it also uses dedicated REST APIs from according Azure solutions."", ""instructions"": []}, {""title"": ""Microsoft Sentinel Features"", ""description"": ""Microsoft Sentinel is a cloud native SIEM solution from Microsoft and a security alert provider in Azure Graph Security API. However, the level of alert details available with the Graph Security API is limited for Microsoft Sentinel. The connector can therefore augment alerts with further details (insights rule search results), from the underlying Microsoft Sentinel Log Analytics workspace. To be able to do that, the connector communicates with Azure Log Analytics REST API and needs according permissions (see below). Furthermore, the app can also update the status of Microsoft Sentinel incidents, when all related security alerts are e.g. in progress or resolved. 
In order to be able to do that, the connector needs to be a member of the 'Microsoft Sentinel Contributors' group in your Azure Subscription.\n **Automated deployment in Azure**\n The credentials required to access the beforementioned APIs, are generated by a small PowerShell script that you can download below. The script performs the following tasks for you:\n - Logs you on to your Azure Subscription (please login with an administrator account)\n - Creates a new enterprise application for this connector in your Azure AD, also referred to as service principal\n - Creates a new role in your Azure IAM that grants read/query permission to only Azure Log Analytics workspaces.\n - Joins the enterprise application to that user role\n - Joins the enterprise application to the 'Microsoft Sentinel Contributors' role\n - Outputs some data that you need to configure app (see below)"", ""instructions"": []}, {""title"": ""Deployment procedure"", ""description"": ""1. Download the PowerShell deployment script from [here](https://github.com/signl4/signl4-integration-azuresentinel/blob/master/registerSIGNL4Client.ps1).\n2. Review the script and the roles and permission scopes it deploys for the new app registration. If you don't want to use the connector with Microsoft Sentinel, you could remove all role creation and role assignment code and only use it to create the app registration (SPN) in your Azure Active Directory.\n3. Run the script. At the end it outputs information that you need to enter in the connector app configuration.\n4. In Azure AD, click on 'App Registrations'. Find the app with the name 'SIGNL4AzureSecurity' and open its details\n5. On the left menu blade click 'API Permissions'. Then click 'Add a permission'.\n6. On the blade that loads, under 'Microsoft APIs' click on the 'Microsoft Graph' tile, then click 'App permission'.\n7. In the table that is displayed expand 'SecurityEvents' and check 'SecurityEvents.Read.All' and 'SecurityEvents.ReadWrite.All'.\n8. 
Click 'Add permissions'."", ""instructions"": []}, {""title"": ""Configuring the SIGNL4 connector app"", ""description"": ""Finally, enter the IDs, that the script has outputted in the connector configuration:\n - Azure Tenant ID\n - Azure Subscription ID\n - Client ID (of the enterprise application)\n - Client Secret (of the enterprise application)\n Once the app is enabled, it will start reading your Azure Graph Security API alerts.\n\n>**NOTE:** It will initially only read the alerts that have occurred within the last 24 hours."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SIGNL4/Data%20Connectors/DerdackSIGNL4.json","true" +"SINECSecurityGuard_CL","SINEC Security Guard","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SINEC%20Security%20Guard","siemensplmsoftware","azure-sentinel-solution-ssg","2024-07-15","","","Siemens AG","Partner","https://siemens.com/sinec-security-guard","","domains,verticals","SSG","Siemens AG","SINEC Security Guard","The SINEC Security Guard solution for Microsoft Sentinel allows you to ingest security events of your industrial networks from the [SINEC Security Guard](https://siemens.com/sinec-security-guard) into Microsoft Sentinel","[{""description"": ""This Data Connector relies on the SINEC Security Guard Sensor Package to be able to receive Sensor events in Microsoft Sentinel. The Sensor Package can be purchased in the Siemens Xcelerator Marketplace."", ""instructions"": [{""parameters"": {""title"": ""1. 
Please follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Set up the SINEC Security Guard Sensor"", ""description"": ""Detailed step for setting up the sensor.""}, {""title"": ""Create the Data Connector and configure it in the SINEC Security Guard web interface"", ""description"": ""Instructions on configuring the data connector.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SINEC%20Security%20Guard/Data%20Connectors/data_connector_GenericUI.json","true" +"","SOC Handbook","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SOC%20Handbook","microsoftsentinelcommunity","azure-sentinel-solution-sochandbook","2022-11-30","","","Community","Community","https://github.com/Azure/Azure-Sentinel/issues","","domains","","","","","","","","false" +"SOCPrimeAuditLogs_CL","SOC Prime CCF","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SOC%20Prime%20CCF","socprime","azure-sentinel-solution-socprimeauditccp","2025-09-25","","","SOC Prime","Partner","https://socprime.com/","","domains","SOCPrimeAuditLogsDataConnector","Microsoft","SOC Prime Platform Audit Logs Data Connector","The [SOC Prime Audit 
Logs](https://help.socprime.com/en/articles/6265791-api) data connector allows ingesting logs from the SOC Prime Platform API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. It uses the SOC Prime Platform API to fetch SOC Prime platform audit logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the SOC Prime Platform API \n Follow the instructions to obtain the credentials. you can also follow this [guide](https://help.socprime.com/en/articles/6265791-api#h_8a0d20b204) to generate personal API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### Retrieve API Key\n 1. Log in to the SOC Prime Platform\n 2. Click [**Account**] icon -> [**Platform Settings**] -> [**API**] \n 3. Click [**Add New Key**] \n 4. In the modal that appears give your key a meaningful name, set expiration date and product APIs the key provides access to \n 5. Click on [**Generate**] \n 6. Copy the key and save it in a safe place. 
You won't be able to view it again once you close this modal ""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""SOC Prime API Key"", ""placeholder"": ""API Key"", ""type"": ""password"", ""name"": ""apitoken""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SOC%20Prime%20CCF/Data%20Connectors/SOCPrime_ccp/SOCPrime_DataConnectorDefinition.json","true" +"","SOC-Process-Framework","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SOC-Process-Framework","azuresentinel","azure-sentinel-solution-socprocessframework","2022-04-08","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"SailPointIDN_Events_CL","SailPointIdentityNow","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SailPointIdentityNow","sailpoint1582673310610","sentinel_offering","2021-10-26","","","SailPoint","Partner","","","domains","SailPointIdentityNow","SailPoint","SailPoint IdentityNow","The [SailPoint](https://www.sailpoint.com/) IdentityNow data connector provides the capability to ingest [SailPoint IdentityNow] search events into Microsoft Sentinel through the REST API. The connector provides customers the ability to extract audit information from their IdentityNow tenant. 
It is intended to make it even easier to bring IdentityNow user activity and governance events into Microsoft Sentinel to improve insights from your security incident and event monitoring solution.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the SailPoint IdentityNow REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the SailPoint IdentityNow API**\n\n [Follow the instructions](https://community.sailpoint.com/t5/IdentityNow-Articles/Best-Practice-Using-Personal-Access-Tokens-in-IdentityNow/ta-p/150471) to obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the SailPoint IdentityNow data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the SailPoint IdentityNow data connector using an ARM Template.\n\n1. 
Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-sailpointidentitynow-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter other information and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the SailPoint IdentityNow data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-sailpointidentitynow-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. 
Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. searcheventXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.9.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tTENANT_ID\n\t\tSHARED_KEY\n\t\tLIMIT\n\t\tGRANT_TYPE\n\t\tCUSTOMER_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tAZURE_STORAGE_ACCESS_KEY\n\t\tAZURE_STORAGE_ACCOUNT_NAME\n\t\tAzureWebJobsStorage\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SailPoint IdentityNow API Authentication Credentials"", ""description"": ""TENANT_ID, CLIENT_ID and CLIENT_SECRET are required for authentication.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SailPointIdentityNow/Data%20Connectors/SailPoint_IdentityNow_FunctionApp.json","true" +"SailPointIDN_Triggers_CL","SailPointIdentityNow","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SailPointIdentityNow","sailpoint1582673310610","sentinel_offering","2021-10-26","","","SailPoint","Partner","","","domains","SailPointIdentityNow","SailPoint","SailPoint IdentityNow","The [SailPoint](https://www.sailpoint.com/) IdentityNow data connector provides the capability to ingest [SailPoint IdentityNow] search events into Microsoft Sentinel through the REST API. 
The connector provides customers the ability to extract audit information from their IdentityNow tenant. It is intended to make it even easier to bring IdentityNow user activity and governance events into Microsoft Sentinel to improve insights from your security incident and event monitoring solution.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the SailPoint IdentityNow REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the SailPoint IdentityNow API**\n\n [Follow the instructions](https://community.sailpoint.com/t5/IdentityNow-Articles/Best-Practice-Using-Personal-Access-Tokens-in-IdentityNow/ta-p/150471) to obtain the credentials. 
\n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the SailPoint IdentityNow data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the SailPoint IdentityNow data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-sailpointidentitynow-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter other information and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the SailPoint IdentityNow data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-sailpointidentitynow-functionapp) file. 
Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. searcheventXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.9.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tTENANT_ID\n\t\tSHARED_KEY\n\t\tLIMIT\n\t\tGRANT_TYPE\n\t\tCUSTOMER_ID\n\t\tCLIENT_ID\n\t\tCLIENT_SECRET\n\t\tAZURE_STORAGE_ACCESS_KEY\n\t\tAZURE_STORAGE_ACCOUNT_NAME\n\t\tAzureWebJobsStorage\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""SailPoint IdentityNow API Authentication Credentials"", ""description"": ""TENANT_ID, CLIENT_ID and CLIENT_SECRET are required for authentication.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SailPointIdentityNow/Data%20Connectors/SailPoint_IdentityNow_FunctionApp.json","true" +"","SalemCyber","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SalemCyber","saleminc1627928803559","salem-cyber-ai-analyst","2023-07-21","2023-07-21","","Salem Cyber","Partner","https://www.salemcyber.com/contact","","domains","","","","","","","","false" +"SalesforceServiceCloudV2_CL","Salesforce Service Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Salesforce%20Service%20Cloud","azuresentinel","azure-sentinel-solution-salesforceservicecloud","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","SalesforceServiceCloud","Salesforce","[DEPRECATED] Salesforce Service Cloud","The Salesforce Service Cloud data connector provides the capability to ingest information about your Salesforce operational events into Microsoft Sentinel through the REST API. The connector provides ability to review events in your org on an accelerated basis, get [event log files](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/event_log_file_hourly_overview.htm) in hourly increments for recent activity.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Salesforce Lightning Platform REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias SalesforceServiceCloud and load the function code or click [here](https://aka.ms/sentinel-SalesforceServiceCloud-parser). The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Salesforce Lightning Platform REST API**\n\n1. See the [link](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/quickstart.htm) and follow the instructions for obtaining Salesforce API Authorization credentials. \n2. On the **Set Up Authorization** step choose **Session ID Authorization** method.\n3. You must provide your client id, client secret, username, and password with user security token.""}, {""title"": """", ""description"": "">**NOTE:** Ingesting data from on an hourly interval may require additional licensing based on the edition of the Salesforce Service Cloud being used. 
Please refer to [Salesforce documentation](https://www.salesforce.com/editions-pricing/service-cloud/) and/or support for more details.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Salesforce Service Cloud data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Salesforce API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Salesforce Service Cloud data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SalesforceServiceCloud-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **Salesforce API Username**, **Salesforce API Password**, **Salesforce Security Token**, **Salesforce Consumer Key**, **Salesforce Consumer Secret** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Salesforce Service Cloud data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-SalesforceServiceCloud-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tSalesforceUser\n\t\tSalesforcePass\n\t\tSalesforceSecurityToken\n\t\tSalesforceConsumerKey\n\t\tSalesforceConsumerSecret\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`\n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Salesforce API Username**, **Salesforce API Password**, **Salesforce Security Token**, **Salesforce Consumer Key**, **Salesforce Consumer Secret** is required for REST API. 
[See the documentation to learn more about API](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/quickstart.htm).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Salesforce%20Service%20Cloud/Data%20Connectors/SalesforceServiceCloud_API_FunctionApp.json","true" +"SalesforceServiceCloud_CL","Salesforce Service Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Salesforce%20Service%20Cloud","azuresentinel","azure-sentinel-solution-salesforceservicecloud","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","SalesforceServiceCloud","Salesforce","[DEPRECATED] Salesforce Service Cloud","The Salesforce Service Cloud data connector provides the capability to ingest information about your Salesforce operational events into Microsoft Sentinel through the REST API. The connector provides ability to review events in your org on an accelerated basis, get [event log files](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/event_log_file_hourly_overview.htm) in hourly increments for recent activity.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Salesforce Lightning Platform REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias SalesforceServiceCloud and load the function code or click [here](https://aka.ms/sentinel-SalesforceServiceCloud-parser). The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Salesforce Lightning Platform REST API**\n\n1. See the [link](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/quickstart.htm) and follow the instructions for obtaining Salesforce API Authorization credentials. \n2. On the **Set Up Authorization** step choose **Session ID Authorization** method.\n3. You must provide your client id, client secret, username, and password with user security token.""}, {""title"": """", ""description"": "">**NOTE:** Ingesting data from on an hourly interval may require additional licensing based on the edition of the Salesforce Service Cloud being used. 
Please refer to [Salesforce documentation](https://www.salesforce.com/editions-pricing/service-cloud/) and/or support for more details.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Salesforce Service Cloud data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Salesforce API Authorization credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Salesforce Service Cloud data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SalesforceServiceCloud-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **Salesforce API Username**, **Salesforce API Password**, **Salesforce Security Token**, **Salesforce Consumer Key**, **Salesforce Consumer Secret** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Salesforce Service Cloud data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-SalesforceServiceCloud-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tSalesforceUser\n\t\tSalesforcePass\n\t\tSalesforceSecurityToken\n\t\tSalesforceConsumerKey\n\t\tSalesforceConsumerSecret\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`\n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Salesforce API Username**, **Salesforce API Password**, **Salesforce Security Token**, **Salesforce Consumer Key**, **Salesforce Consumer Secret** is required for REST API. 
[See the documentation to learn more about API](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/quickstart.htm).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Salesforce%20Service%20Cloud/Data%20Connectors/SalesforceServiceCloud_API_FunctionApp.json","true" +"SalesforceServiceCloudV2_CL","Salesforce Service Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Salesforce%20Service%20Cloud","azuresentinel","azure-sentinel-solution-salesforceservicecloud","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","SalesforceServiceCloudCCPDefinition","Microsoft","Salesforce Service Cloud (via Codeless Connector Framework)","The Salesforce Service Cloud data connector provides the capability to ingest information about your Salesforce operational events into Microsoft Sentinel through the REST API. The connector provides ability to review events in your org on an accelerated basis, get [event log files](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/event_log_file_hourly_overview.htm) in hourly increments for recent activity.","[{""title"": ""Connect to Salesforce Service Cloud API to start collecting event logs in Microsoft Sentinel"", ""description"": ""Follow [Create a Connected App in Salesforce for OAuth](https://help.salesforce.com/s/articleView?id=platform.ev_relay_create_connected_app.htm&type=5) and [Configure a Connected App for the OAuth 2.0 Client Credentials Flow](https://help.salesforce.com/s/articleView?id=xcloud.connected_app_client_credentials_setup.htm&type=5) to create a Connected App with access to the Salesforce Service Cloud API. Through those instructions, you should get the Consumer Key and Consumer Secret.\n For Salesforce Domain name, Go to Setup, type My Domain in the Quick Find box, and select My Domain to view your domain details. 
Make sure to enter the domain name without a trailing slash (e.g., https://your-domain.my.salesforce.com). Fill the form below with that information."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Salesforce Domain Name"", ""placeholder"": ""Salesforce Domain Name"", ""type"": ""text"", ""name"": ""salesforceDomainName"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Log Collection Interval"", ""name"": ""queryType"", ""options"": [{""key"": ""SELECT Id,EventType,LogDate,Interval,CreatedDate,LogFile,LogFileLength FROM EventLogFile WHERE Interval='Hourly' and CreatedDate>{_QueryWindowStartTime} and CreatedDate<{_QueryWindowEndTime}"", ""text"": ""Hourly""}, {""key"": ""SELECT Id,EventType,LogDate,CreatedDate,LogFile,LogFileLength FROM EventLogFile WHERE CreatedDate>{_QueryWindowStartTime} and CreatedDate<{_QueryWindowEndTime}"", ""text"": ""Daily""}], ""placeholder"": ""Select an interval type"", ""isMultiSelect"": false, ""required"": true}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Consumer Key"", ""clientSecretLabel"": ""Consumer Secret"", ""clientIdPlaceholder"": ""Enter Connected App Consumer Key"", ""clientSecretPlaceholder"": ""Enter Connected App Consumer Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}], ""customs"": [{""name"": ""Salesforce Service Cloud API access"", ""description"": ""Access to the Salesforce Service Cloud API through a Connected App is 
required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Salesforce%20Service%20Cloud/Data%20Connectors/SalesforceSentinelConnector_CCP/SalesforceServiceCloud_DataConnectorDefinition.json","true" +"Samsung_Knox_Application_CL","Samsung Knox Asset Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence","samsungelectronics1632791654245","azure-sentinel-solution-samsung-knox-kai","2025-01-15","","","Samsung Electronics Co., Ltd.","Partner","https://www2.samsungknox.com/en/support","","domains","SamsungDCDefinition","Samsung","Samsung Knox Asset Intelligence","Samsung Knox Asset Intelligence Data Connector lets you centralize your mobile security events and logs in order to view customized insights using the Workbook template, and identify incidents based on Analytics Rules templates.","[{""title"": """", ""description"": ""This Data Connector uses the Microsoft Log Ingestion API to push security events into Microsoft Sentinel from Samsung Knox Asset Intelligence (KAI) solution.""}, {""title"": ""STEP 1 - Create and register an Entra Application"", ""description"": "">**Note**: This Data Connector can support either Certificate-based or Client Secret-based authentication. For Certificate-based authentication, you can download the Samsung CA-signed certificate (public key) from [KAI documentation portal](https://docs.samsungknox.com/admin/knox-asset-intelligence/assets/samsung-knox-validation-certificate.crt). For Client Secret-based authentication, you can create the secret during the Entra application registration. Ensure you copy the Client Secret value as soon as it is generated.\n\n>**IMPORTANT:** Save the values for Tenant (Directory) ID and Client (Application) ID. 
If Client Secret-based authentication is enabled, save Client Secret (Secret Value) associated with the Entra app.""}, {""title"": ""STEP 2 - Automate deployment of this Data Connector using the below Azure Resource Manager (ARM) template"", ""description"": "">**IMPORTANT:** Before deploying the Data Connector, copy the below Workspace name associated with your Microsoft Sentinel (also your Log Analytics) instance."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""1. Click the button below to install Samsung Knox Intelligence Solution. \n\n\t[![DeployToAzure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SamsungDCDefinition-azuredeploy)\\n2. Provide the following required fields: Log Analytics Workspace Name, Log Analytics Workspace Location, Log Analytics Workspace Subscription (ID) and Log Analytics Workspace Resource Group.""}, {""title"": ""STEP 3 - Obtain Microsoft Sentinel Data Collection details"", ""description"": ""Once the ARM template is deployed, navigate to Data Collection Rules https://portal.azure.com/#browse/microsoft.insights%2Fdatacollectionrules? and save values associated with the Immutable ID (DCR) and Data Collection Endpoint (DCE). \n\n>**IMPORTANT:** To enable end-to-end integration, information related to Microsoft Sentinel DCE and DCR are required for configuration in Samsung Knox Asset Intelligence portal (STEP 4). \n\nEnsure the Entra Application created in STEP 1 has permissions to use the DCR created in order to send data to the DCE. Please refer to https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#assign-permissions-to-the-dcr to assign permissions accordingly.""}, {""title"": ""STEP 4 - Connect to Samsung Knox Asset Intelligence solution to configure Microsoft Sentinel to push select Knox Security Events as Alerts"", ""description"": ""1. 
Login to [Knox Asset Intelligence administration portal](https://central.samsungknox.com/kaiadmin/dai/home) and navigate to **Dashboard Settings**; this is available at the top-right corner of the Portal.\n> **Note**: Ensure the login user has access to 'Security' and 'Manage dashboard view and data collection' permissions.\n\n2. Click on Security tab to view settings for Microsoft Sentinel Integration and Knox Security Logs.\n\n3. In the Security Operations Integration page, toggle on **'Enable Microsoft Sentinel Integration'** and enter appropriate values in the required fields.\n\n >a. Based on the authentication method used, refer to information saved from STEP 1 while registering the Entra application. \n\n >b. For Microsoft Sentinel DCE and DCR, refer to the information saved from STEP 3. \n\n4. Click on **'Test Connection'** and ensure the connection is successful.\n\n5. Before you can Save, configure Knox Security Logs by selecting either Essential or Advanced configuration **(default: Essential).**\n\n6. To complete the Microsoft Sentinel integration, click **'Save'**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Entra app"", ""description"": ""An Entra app needs to be registered and provisioned with \u2018Microsoft Metrics Publisher\u2019 role and configured with either Certificate or Client Secret as credentials for secure data transfer. See [the Log ingestion tutorial to learn more about Entra App creation, registration and credential configuration.](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal) ""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence/Data%20Connectors/Template_Samsung.json","true" +"Samsung_Knox_Audit_CL","Samsung Knox Asset Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence","samsungelectronics1632791654245","azure-sentinel-solution-samsung-knox-kai","2025-01-15","","","Samsung Electronics Co., Ltd.","Partner","https://www2.samsungknox.com/en/support","","domains","SamsungDCDefinition","Samsung","Samsung Knox Asset Intelligence","Samsung Knox Asset Intelligence Data Connector lets you centralize your mobile security events and logs in order to view customized insights using the Workbook template, and identify incidents based on Analytics Rules templates.","[{""title"": """", ""description"": ""This Data Connector uses the Microsoft Log Ingestion API to push security events into Microsoft Sentinel from Samsung Knox Asset Intelligence (KAI) solution.""}, {""title"": ""STEP 1 - Create and register an Entra Application"", ""description"": "">**Note**: This Data Connector can support either Certificate-based or Client Secret-based authentication. 
For Certificate-based authentication, you can download the Samsung CA-signed certificate (public key) from [KAI documentation portal](https://docs.samsungknox.com/admin/knox-asset-intelligence/assets/samsung-knox-validation-certificate.crt). For Client Secret-based authentication, you can create the secret during the Entra application registration. Ensure you copy the Client Secret value as soon as it is generated.\n\n>**IMPORTANT:** Save the values for Tenant (Directory) ID and Client (Application) ID. If Client Secret-based authentication is enabled, save Client Secret (Secret Value) associated with the Entra app.""}, {""title"": ""STEP 2 - Automate deployment of this Data Connector using the below Azure Resource Manager (ARM) template"", ""description"": "">**IMPORTANT:** Before deploying the Data Connector, copy the below Workspace name associated with your Microsoft Sentinel (also your Log Analytics) instance."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""1. Click the button below to install Samsung Knox Intelligence Solution. \n\n\t[![DeployToAzure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SamsungDCDefinition-azuredeploy)\\n2. Provide the following required fields: Log Analytics Workspace Name, Log Analytics Workspace Location, Log Analytics Workspace Subscription (ID) and Log Analytics Workspace Resource Group.""}, {""title"": ""STEP 3 - Obtain Microsoft Sentinel Data Collection details"", ""description"": ""Once the ARM template is deployed, navigate to Data Collection Rules https://portal.azure.com/#browse/microsoft.insights%2Fdatacollectionrules? and save values associated with the Immutable ID (DCR) and Data Collection Endpoint (DCE). 
\n\n>**IMPORTANT:** To enable end-to-end integration, information related to Microsoft Sentinel DCE and DCR are required for configuration in Samsung Knox Asset Intelligence portal (STEP 4). \n\nEnsure the Entra Application created in STEP 1 has permissions to use the DCR created in order to send data to the DCE. Please refer to https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#assign-permissions-to-the-dcr to assign permissions accordingly.""}, {""title"": ""STEP 4 - Connect to Samsung Knox Asset Intelligence solution to configure Microsoft Sentinel to push select Knox Security Events as Alerts"", ""description"": ""1. Login to [Knox Asset Intelligence administration portal](https://central.samsungknox.com/kaiadmin/dai/home) and navigate to **Dashboard Settings**; this is available at the top-right corner of the Portal.\n> **Note**: Ensure the login user has access to 'Security' and 'Manage dashboard view and data collection' permissions.\n\n2. Click on Security tab to view settings for Microsoft Sentinel Integration and Knox Security Logs.\n\n3. In the Security Operations Integration page, toggle on **'Enable Microsoft Sentinel Integration'** and enter appropriate values in the required fields.\n\n >a. Based on the authentication method used, refer to information saved from STEP 1 while registering the Entra application. \n\n >b. For Microsoft Sentinel DCE and DCR, refer to the information saved from STEP 3. \n\n4. Click on **'Test Connection'** and ensure the connection is successful.\n\n5. Before you can Save, configure Knox Security Logs by selecting either Essential or Advanced configuration **(default: Essential).**\n\n6. 
To complete the Microsoft Sentinel integration, click **'Save'**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Entra app"", ""description"": ""An Entra app needs to be registered and provisioned with \u2018Microsoft Metrics Publisher\u2019 role and configured with either Certificate or Client Secret as credentials for secure data transfer. 
See [the Log ingestion tutorial to learn more about Entra App creation, registration and credential configuration.](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal) ""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence/Data%20Connectors/Template_Samsung.json","true" +"Samsung_Knox_Network_CL","Samsung Knox Asset Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence","samsungelectronics1632791654245","azure-sentinel-solution-samsung-knox-kai","2025-01-15","","","Samsung Electronics Co., Ltd.","Partner","https://www2.samsungknox.com/en/support","","domains","SamsungDCDefinition","Samsung","Samsung Knox Asset Intelligence","Samsung Knox Asset Intelligence Data Connector lets you centralize your mobile security events and logs in order to view customized insights using the Workbook template, and identify incidents based on Analytics Rules templates.","[{""title"": """", ""description"": ""This Data Connector uses the Microsoft Log Ingestion API to push security events into Microsoft Sentinel from Samsung Knox Asset Intelligence (KAI) solution.""}, {""title"": ""STEP 1 - Create and register an Entra Application"", ""description"": "">**Note**: This Data Connector can support either Certificate-based or Client Secret-based authentication. For Certificate-based authentication, you can download the Samsung CA-signed certificate (public key) from [KAI documentation portal](https://docs.samsungknox.com/admin/knox-asset-intelligence/assets/samsung-knox-validation-certificate.crt). For Client Secret-based authentication, you can create the secret during the Entra application registration. Ensure you copy the Client Secret value as soon as it is generated.\n\n>**IMPORTANT:** Save the values for Tenant (Directory) ID and Client (Application) ID. 
If Client Secret-based authentication is enabled, save Client Secret (Secret Value) associated with the Entra app.""}, {""title"": ""STEP 2 - Automate deployment of this Data Connector using the below Azure Resource Manager (ARM) template"", ""description"": "">**IMPORTANT:** Before deploying the Data Connector, copy the below Workspace name associated with your Microsoft Sentinel (also your Log Analytics) instance."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""1. Click the button below to install Samsung Knox Intelligence Solution. \n\n\t[![DeployToAzure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SamsungDCDefinition-azuredeploy)\\n2. Provide the following required fields: Log Analytics Workspace Name, Log Analytics Workspace Location, Log Analytics Workspace Subscription (ID) and Log Analytics Workspace Resource Group.""}, {""title"": ""STEP 3 - Obtain Microsoft Sentinel Data Collection details"", ""description"": ""Once the ARM template is deployed, navigate to Data Collection Rules https://portal.azure.com/#browse/microsoft.insights%2Fdatacollectionrules? and save values associated with the Immutable ID (DCR) and Data Collection Endpoint (DCE). \n\n>**IMPORTANT:** To enable end-to-end integration, information related to Microsoft Sentinel DCE and DCR are required for configuration in Samsung Knox Asset Intelligence portal (STEP 4). \n\nEnsure the Entra Application created in STEP 1 has permissions to use the DCR created in order to send data to the DCE. Please refer to https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#assign-permissions-to-the-dcr to assign permissions accordingly.""}, {""title"": ""STEP 4 - Connect to Samsung Knox Asset Intelligence solution to configure Microsoft Sentinel to push select Knox Security Events as Alerts"", ""description"": ""1. 
Login to [Knox Asset Intelligence administration portal](https://central.samsungknox.com/kaiadmin/dai/home) and navigate to **Dashboard Settings**; this is available at the top-right corner of the Portal.\n> **Note**: Ensure the login user has access to 'Security' and 'Manage dashboard view and data collection' permissions.\n\n2. Click on Security tab to view settings for Microsoft Sentinel Integration and Knox Security Logs.\n\n3. In the Security Operations Integration page, toggle on **'Enable Microsoft Sentinel Integration'** and enter appropriate values in the required fields.\n\n >a. Based on the authentication method used, refer to information saved from STEP 1 while registering the Entra application. \n\n >b. For Microsoft Sentinel DCE and DCR, refer to the information saved from STEP 3. \n\n4. Click on **'Test Connection'** and ensure the connection is successful.\n\n5. Before you can Save, configure Knox Security Logs by selecting either Essential or Advanced configuration **(default: Essential).**\n\n6. To complete the Microsoft Sentinel integration, click **'Save'**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Entra app"", ""description"": ""An Entra app needs to be registered and provisioned with \u2018Microsoft Metrics Publisher\u2019 role and configured with either Certificate or Client Secret as credentials for secure data transfer. See [the Log ingestion tutorial to learn more about Entra App creation, registration and credential configuration.](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal) ""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence/Data%20Connectors/Template_Samsung.json","true" +"Samsung_Knox_Process_CL","Samsung Knox Asset Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence","samsungelectronics1632791654245","azure-sentinel-solution-samsung-knox-kai","2025-01-15","","","Samsung Electronics Co., Ltd.","Partner","https://www2.samsungknox.com/en/support","","domains","SamsungDCDefinition","Samsung","Samsung Knox Asset Intelligence","Samsung Knox Asset Intelligence Data Connector lets you centralize your mobile security events and logs in order to view customized insights using the Workbook template, and identify incidents based on Analytics Rules templates.","[{""title"": """", ""description"": ""This Data Connector uses the Microsoft Log Ingestion API to push security events into Microsoft Sentinel from Samsung Knox Asset Intelligence (KAI) solution.""}, {""title"": ""STEP 1 - Create and register an Entra Application"", ""description"": "">**Note**: This Data Connector can support either Certificate-based or Client Secret-based authentication. 
For Certificate-based authentication, you can download the Samsung CA-signed certificate (public key) from [KAI documentation portal](https://docs.samsungknox.com/admin/knox-asset-intelligence/assets/samsung-knox-validation-certificate.crt). For Client Secret-based authentication, you can create the secret during the Entra application registration. Ensure you copy the Client Secret value as soon as it is generated.\n\n>**IMPORTANT:** Save the values for Tenant (Directory) ID and Client (Application) ID. If Client Secret-based authentication is enabled, save Client Secret (Secret Value) associated with the Entra app.""}, {""title"": ""STEP 2 - Automate deployment of this Data Connector using the below Azure Resource Manager (ARM) template"", ""description"": "">**IMPORTANT:** Before deploying the Data Connector, copy the below Workspace name associated with your Microsoft Sentinel (also your Log Analytics) instance."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""1. Click the button below to install Samsung Knox Intelligence Solution. \n\n\t[![DeployToAzure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SamsungDCDefinition-azuredeploy)\\n2. Provide the following required fields: Log Analytics Workspace Name, Log Analytics Workspace Location, Log Analytics Workspace Subscription (ID) and Log Analytics Workspace Resource Group.""}, {""title"": ""STEP 3 - Obtain Microsoft Sentinel Data Collection details"", ""description"": ""Once the ARM template is deployed, navigate to Data Collection Rules https://portal.azure.com/#browse/microsoft.insights%2Fdatacollectionrules? and save values associated with the Immutable ID (DCR) and Data Collection Endpoint (DCE). 
\n\n>**IMPORTANT:** To enable end-to-end integration, information related to Microsoft Sentinel DCE and DCR are required for configuration in Samsung Knox Asset Intelligence portal (STEP 4). \n\nEnsure the Entra Application created in STEP 1 has permissions to use the DCR created in order to send data to the DCE. Please refer to https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#assign-permissions-to-the-dcr to assign permissions accordingly.""}, {""title"": ""STEP 4 - Connect to Samsung Knox Asset Intelligence solution to configure Microsoft Sentinel to push select Knox Security Events as Alerts"", ""description"": ""1. Login to [Knox Asset Intelligence administration portal](https://central.samsungknox.com/kaiadmin/dai/home) and navigate to **Dashboard Settings**; this is available at the top-right corner of the Portal.\n> **Note**: Ensure the login user has access to 'Security' and 'Manage dashboard view and data collection' permissions.\n\n2. Click on Security tab to view settings for Microsoft Sentinel Integration and Knox Security Logs.\n\n3. In the Security Operations Integration page, toggle on **'Enable Microsoft Sentinel Integration'** and enter appropriate values in the required fields.\n\n >a. Based on the authentication method used, refer to information saved from STEP 1 while registering the Entra application. \n\n >b. For Microsoft Sentinel DCE and DCR, refer to the information saved from STEP 3. \n\n4. Click on **'Test Connection'** and ensure the connection is successful.\n\n5. Before you can Save, configure Knox Security Logs by selecting either Essential or Advanced configuration **(default: Essential).**\n\n6. 
To complete the Microsoft Sentinel integration, click **'Save'**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Entra app"", ""description"": ""An Entra app needs to be registered and provisioned with \u2018Microsoft Metrics Publisher\u2019 role and configured with either Certificate or Client Secret as credentials for secure data transfer. 
See [the Log ingestion tutorial to learn more about Entra App creation, registration and credential configuration.](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal) ""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence/Data%20Connectors/Template_Samsung.json","true" +"Samsung_Knox_System_CL","Samsung Knox Asset Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence","samsungelectronics1632791654245","azure-sentinel-solution-samsung-knox-kai","2025-01-15","","","Samsung Electronics Co., Ltd.","Partner","https://www2.samsungknox.com/en/support","","domains","SamsungDCDefinition","Samsung","Samsung Knox Asset Intelligence","Samsung Knox Asset Intelligence Data Connector lets you centralize your mobile security events and logs in order to view customized insights using the Workbook template, and identify incidents based on Analytics Rules templates.","[{""title"": """", ""description"": ""This Data Connector uses the Microsoft Log Ingestion API to push security events into Microsoft Sentinel from Samsung Knox Asset Intelligence (KAI) solution.""}, {""title"": ""STEP 1 - Create and register an Entra Application"", ""description"": "">**Note**: This Data Connector can support either Certificate-based or Client Secret-based authentication. For Certificate-based authentication, you can download the Samsung CA-signed certificate (public key) from [KAI documentation portal](https://docs.samsungknox.com/admin/knox-asset-intelligence/assets/samsung-knox-validation-certificate.crt). For Client Secret-based authentication, you can create the secret during the Entra application registration. Ensure you copy the Client Secret value as soon as it is generated.\n\n>**IMPORTANT:** Save the values for Tenant (Directory) ID and Client (Application) ID. 
If Client Secret-based authentication is enabled, save Client Secret (Secret Value) associated with the Entra app.""}, {""title"": ""STEP 2 - Automate deployment of this Data Connector using the below Azure Resource Manager (ARM) template"", ""description"": "">**IMPORTANT:** Before deploying the Data Connector, copy the below Workspace name associated with your Microsoft Sentinel (also your Log Analytics) instance."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""1. Click the button below to install Samsung Knox Intelligence Solution. \n\n\t[![DeployToAzure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SamsungDCDefinition-azuredeploy)\\n2. Provide the following required fields: Log Analytics Workspace Name, Log Analytics Workspace Location, Log Analytics Workspace Subscription (ID) and Log Analytics Workspace Resource Group.""}, {""title"": ""STEP 3 - Obtain Microsoft Sentinel Data Collection details"", ""description"": ""Once the ARM template is deployed, navigate to Data Collection Rules https://portal.azure.com/#browse/microsoft.insights%2Fdatacollectionrules? and save values associated with the Immutable ID (DCR) and Data Collection Endpoint (DCE). \n\n>**IMPORTANT:** To enable end-to-end integration, information related to Microsoft Sentinel DCE and DCR are required for configuration in Samsung Knox Asset Intelligence portal (STEP 4). \n\nEnsure the Entra Application created in STEP 1 has permissions to use the DCR created in order to send data to the DCE. Please refer to https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#assign-permissions-to-the-dcr to assign permissions accordingly.""}, {""title"": ""STEP 4 - Connect to Samsung Knox Asset Intelligence solution to configure Microsoft Sentinel to push select Knox Security Events as Alerts"", ""description"": ""1. 
Login to [Knox Asset Intelligence administration portal](https://central.samsungknox.com/kaiadmin/dai/home) and navigate to **Dashboard Settings**; this is available at the top-right corner of the Portal.\n> **Note**: Ensure the login user has access to 'Security' and 'Manage dashboard view and data collection' permissions.\n\n2. Click on Security tab to view settings for Microsoft Sentinel Integration and Knox Security Logs.\n\n3. In the Security Operations Integration page, toggle on **'Enable Microsoft Sentinel Integration'** and enter appropriate values in the required fields.\n\n >a. Based on the authentication method used, refer to information saved from STEP 1 while registering the Entra application. \n\n >b. For Microsoft Sentinel DCE and DCR, refer to the information saved from STEP 3. \n\n4. Click on **'Test Connection'** and ensure the connection is successful.\n\n5. Before you can Save, configure Knox Security Logs by selecting either Essential or Advanced configuration **(default: Essential).**\n\n6. To complete the Microsoft Sentinel integration, click **'Save'**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Entra app"", ""description"": ""An Entra app needs to be registered and provisioned with \u2018Microsoft Metrics Publisher\u2019 role and configured with either Certificate or Client Secret as credentials for secure data transfer. See [the Log ingestion tutorial to learn more about Entra App creation, registration and credential configuration.](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal) ""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence/Data%20Connectors/Template_Samsung.json","true" +"Samsung_Knox_User_CL","Samsung Knox Asset Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence","samsungelectronics1632791654245","azure-sentinel-solution-samsung-knox-kai","2025-01-15","","","Samsung Electronics Co., Ltd.","Partner","https://www2.samsungknox.com/en/support","","domains","SamsungDCDefinition","Samsung","Samsung Knox Asset Intelligence","Samsung Knox Asset Intelligence Data Connector lets you centralize your mobile security events and logs in order to view customized insights using the Workbook template, and identify incidents based on Analytics Rules templates.","[{""title"": """", ""description"": ""This Data Connector uses the Microsoft Log Ingestion API to push security events into Microsoft Sentinel from Samsung Knox Asset Intelligence (KAI) solution.""}, {""title"": ""STEP 1 - Create and register an Entra Application"", ""description"": "">**Note**: This Data Connector can support either Certificate-based or Client Secret-based authentication. 
For Certificate-based authentication, you can download the Samsung CA-signed certificate (public key) from [KAI documentation portal](https://docs.samsungknox.com/admin/knox-asset-intelligence/assets/samsung-knox-validation-certificate.crt). For Client Secret-based authentication, you can create the secret during the Entra application registration. Ensure you copy the Client Secret value as soon as it is generated.\n\n>**IMPORTANT:** Save the values for Tenant (Directory) ID and Client (Application) ID. If Client Secret-based authentication is enabled, save Client Secret (Secret Value) associated with the Entra app.""}, {""title"": ""STEP 2 - Automate deployment of this Data Connector using the below Azure Resource Manager (ARM) template"", ""description"": "">**IMPORTANT:** Before deploying the Data Connector, copy the below Workspace name associated with your Microsoft Sentinel (also your Log Analytics) instance."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""1. Click the button below to install Samsung Knox Intelligence Solution. \n\n\t[![DeployToAzure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SamsungDCDefinition-azuredeploy)\\n2. Provide the following required fields: Log Analytics Workspace Name, Log Analytics Workspace Location, Log Analytics Workspace Subscription (ID) and Log Analytics Workspace Resource Group.""}, {""title"": ""STEP 3 - Obtain Microsoft Sentinel Data Collection details"", ""description"": ""Once the ARM template is deployed, navigate to Data Collection Rules https://portal.azure.com/#browse/microsoft.insights%2Fdatacollectionrules? and save values associated with the Immutable ID (DCR) and Data Collection Endpoint (DCE). 
\n\n>**IMPORTANT:** To enable end-to-end integration, information related to Microsoft Sentinel DCE and DCR are required for configuration in Samsung Knox Asset Intelligence portal (STEP 4). \n\nEnsure the Entra Application created in STEP 1 has permissions to use the DCR created in order to send data to the DCE. Please refer to https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#assign-permissions-to-the-dcr to assign permissions accordingly.""}, {""title"": ""STEP 4 - Connect to Samsung Knox Asset Intelligence solution to configure Microsoft Sentinel to push select Knox Security Events as Alerts"", ""description"": ""1. Login to [Knox Asset Intelligence administration portal](https://central.samsungknox.com/kaiadmin/dai/home) and navigate to **Dashboard Settings**; this is available at the top-right corner of the Portal.\n> **Note**: Ensure the login user has access to 'Security' and 'Manage dashboard view and data collection' permissions.\n\n2. Click on Security tab to view settings for Microsoft Sentinel Integration and Knox Security Logs.\n\n3. In the Security Operations Integration page, toggle on **'Enable Microsoft Sentinel Integration'** and enter appropriate values in the required fields.\n\n >a. Based on the authentication method used, refer to information saved from STEP 1 while registering the Entra application. \n\n >b. For Microsoft Sentinel DCE and DCR, refer to the information saved from STEP 3. \n\n4. Click on **'Test Connection'** and ensure the connection is successful.\n\n5. Before you can Save, configure Knox Security Logs by selecting either Essential or Advanced configuration **(default: Essential).**\n\n6. 
To complete the Microsoft Sentinel integration, click **'Save'**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Entra app"", ""description"": ""An Entra app needs to be registered and provisioned with \u2018Microsoft Metrics Publisher\u2019 role and configured with either Certificate or Client Secret as credentials for secure data transfer. See [the Log ingestion tutorial to learn more about Entra App creation, registration and credential configuration.](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal) ""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Samsung%20Knox%20Asset%20Intelligence/Data%20Connectors/Template_Samsung.json","true" +"ABAPAuditLog","SecurityBridge App","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityBridge%20App","securitybridge1647511278080","securitybridge-sentinel-app-1","2022-02-17","","","SecurityBridge","Partner","https://securitybridge.com/contact/","","domains,verticals","SecurityBridge","SecurityBridge Group GmbH","SecurityBridge Solution for SAP","SecurityBridge enhances SAP security by integrating seamlessly with Microsoft Sentinel, enabling real-time monitoring and threat detection across SAP environments. 
This integration allows Security Operations Centers (SOCs) to consolidate SAP security events with other organizational data, providing a unified view of the threat landscape . Leveraging AI-powered analytics and Microsoft’s Security Copilot, SecurityBridge identifies sophisticated attack patterns and vulnerabilities within SAP applications, including ABAP code scanning and configuration assessments . The solution supports scalable deployments across complex SAP landscapes, whether on-premises, in the cloud, or hybrid environments . By bridging the gap between IT and SAP security teams, SecurityBridge empowers organizations to proactively detect, investigate, and respond to threats, enhancing overall security posture.","[{""title"": ""1. Create ARM Resources and Provide the Required Permissions"", ""description"": ""We will create data collection rule (DCR) and data collection endpoint (DCE) resources. We will also create a Microsoft Entra app registration and assign the required permissions to it."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated deployment of Azure resources\nClicking on \""Deploy push connector resources\"" will trigger the creation of DCR and DCE resources.\nIt will then create a Microsoft Entra app registration with client secret and grant permissions on the DCR. This setup enables data to be sent securely to the DCR using a OAuth v2 client credentials.""}}, {""parameters"": {""label"": ""Deploy push connector resources"", ""applicationDisplayName"": ""SecurityBridge Solution for SAP""}, ""type"": ""DeployPushConnectorButton_test""}]}, {""title"": ""2. 
Maintain the data collection endpoint details and authentication info in SecurityBridge"", ""description"": ""Share the data collection endpoint URL and authentication info with the SecurityBridge administrator to configure the Securitybridge to send data to the data collection endpoint.\n\nLearn more from our KB Page https://abap-experts.atlassian.net/wiki/spaces/SB/pages/4099309579/REST+Push+Interface"", ""instructions"": [{""parameters"": {""label"": ""Use this value to configure as Tenant ID in the LogIngestionAPI credential."", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra Application Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the Application Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Use this value to configure the LogsIngestionURL parameter when deploying the IFlow."", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the DCE URI""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""DCR Immutable ID"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the DCR ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Sentinel for SAP Stream ID"", ""value"": ""SAP_ABAPAUDITLOG""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""SecurityBridge_CL Stream ID"", ""value"": ""Custom-SecurityBridge_CL""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": 
true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""Read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rules. Typically requires Azure RBAC Owner or User Access Administrator role.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityBridge%20App/Data%20Connectors/SecurityBridge_PUSH_CCP/SecurityBridge_connectorDefinition.json","true" +"SecurityBridgeLogs_CL","SecurityBridge App","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityBridge%20App","securitybridge1647511278080","securitybridge-sentinel-app-1","2022-02-17","","","SecurityBridge","Partner","https://securitybridge.com/contact/","","domains,verticals","SecurityBridgeSAP","SecurityBridge","SecurityBridge Threat Detection for SAP","SecurityBridge is the first and only holistic, natively integrated security platform, addressing all aspects needed to protect organizations running SAP from internal and external threats against their core business applications. 
The SecurityBridge platform is an SAP-certified add-on, used by organizations around the globe, and addresses the clients’ need for advanced cybersecurity, real-time monitoring, compliance, code security, and patching to protect against internal and external threats.This Microsoft Sentinel Solution allows you to integrate SecurityBridge Threat Detection events from all your on-premise and cloud based SAP instances into your security monitoring.Use this Microsoft Sentinel Solution to receive normalized and speaking security events, pre-built dashboards and out-of-the-box templates for your SAP security monitoring.","[{""title"": """", ""description"": ""*NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias SecurityBridgeLogs and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityBridge%20App/Parsers/SecurityBridgeLogs.txt).The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using SecurityBridge Application Platform 7.4.0."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux or Windows"", ""description"": ""This solution requires logs collection via an Microsoft Sentinel agent installation\n\n> The Microsoft Sentinel agent is supported on the following Operating Systems: \n1. Windows Servers \n2. SUSE Linux Enterprise Server\n3. Redhat Linux Enterprise Server\n4. Oracle Linux Enterprise Server\n5. 
If you have the SAP solution installed on HPUX / AIX then you will need to deploy a log collector on one of the Linux options listed above and forward your logs to that collector\n\n"", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the custom log directory to be collected"", ""instructions"": [{""parameters"": {""linkType"": ""OpenCustomLogsSettings""}, ""type"": ""InstallAgent""}]}, {""description"": ""1. Select the link above to open your workspace advanced settings \n2. Click **+Add custom**\n3. 
Click **Browse** to upload a sample of a SecurityBridge SAP log file (e.g. AED_20211129164544.cef). Then, click **Next >**\n4. Select **New Line** as the record delimiter then click **Next >**\n5. Select **Windows** or **Linux** and enter the path to SecurityBridge logs based on your configuration. Example:\n - '/usr/sap/tmp/sb_events/*.cef' \n\n>**NOTE:** You can add as many paths as you want in the configuration.\n\n6. After entering the path, click the '+' symbol to apply, then click **Next >** \n7. Add **SecurityBridgeLogs** as the custom log Name and click **Done**""}, {""title"": ""3. Check logs in Microsoft Sentinel"", ""description"": ""Open Log Analytics to check if the logs are received using the SecurityBridgeLogs_CL Custom log table.\n\n>**NOTE:** It may take up to 30 minutes before new logs will appear in SecurityBridgeLogs_CL table."", ""instructions"": []}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityBridge%20App/Data%20Connectors/Connector_SecurityBridge.json","true" +"SecurityScorecardFactor_CL","SecurityScorecard Cybersecurity Ratings","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityScorecard%20Cybersecurity%20Ratings","SecurityScorecard","SecurityScorecard","2022-10-01","2022-10-01","","SecurityScorecard","Partner","https://support.securityscorecard.com/hc/en-us/requests/new","","domains","SecurityScorecardFactorAzureFunctions","SecurityScorecard","SecurityScorecard Factor","SecurityScorecard is the leader in cybersecurity risk ratings. The [SecurityScorecard](https://www.SecurityScorecard.com/) Factors data connector provides the ability for Sentinel to import SecurityScorecard factor ratings as logs. SecurityScorecard provides ratings for over 12 million companies and domains using countless data points from across the internet. Maintain full awareness of any company's security posture and be able to receive timely updates when factor scores change or drop. SecurityScorecard factor ratings are updated daily based on evidence collected across the web.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the SecurityScorecard API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. 
Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the SecurityScorecard API**\n\n Follow these instructions to create/get a SecurityScorecard API token.\n 1. As an administrator in SecurityScorecard, navigate to My Settings and then Users\n 2. Click '+ Add User'\n 3. In the form, check off 'Check to create a bot user'\n 4. Provide a name for the Bot and provide it with Read Only permission\n 5. Click 'Add User'\n 6. Locate the newly created Bot user\n 7. Click 'create token' in the Bot user's row\n 8. Click 'Confirm' and note the API token that has been generated""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the SecurityScorecard Factor data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available.., as well as the SecurityScorecard API Authorization Key(s)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the SecurityScorecard Factor connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SecurityScorecardFactorAPI-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tSecurityScorecard API Key \n\t\tSecurityScorecard Base URL (https://api.securityscorecard.io) \n\t\tDomain \n\t\tPortfolio IDs (Coma separated IDs) \n\t\tSecurityScorecard Factor Table Name (Default: SecurityScorecardFactor) \n\t\tLevel Factor Change (Default: 7) \n\t\tFactor Schedule (Default: 0 15 * * * *) \n\t\tDiff Override Own Factor (Default: true) \n\t\tDiff Override Portfolio Factor (Default: true) \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the SecurityScorecard Factor data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-SecurityScorecardFactorAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. 
Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. SecurityScorecardXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tSecurityScorecard API Key \n\t\tSecurityScorecard Base URL (https://api.securityscorecard.io) \n\t\tDomain \n\t\tPortfolio IDs (Coma separated IDs) \n\t\tSecurityScorecard Factor Table Name (Default: SecurityScorecardFactor) \n\t\tLevel Factor Change (Default: 7) \n\t\tFactor Schedule (Default: 0 15 * * * *) \n\t\tDiff Override Own Factor (Default: true) \n\t\tDiff Override Portfolio Factor (Default: true) \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**SecurityScorecard API Key** is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityScorecard%20Cybersecurity%20Ratings/Data%20Connectors/SecurityScorecardFactor/SecurityScorecardFactor_API_FunctionApp.json","true" +"SecurityScorecardIssues_CL","SecurityScorecard Cybersecurity Ratings","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityScorecard%20Cybersecurity%20Ratings","SecurityScorecard","SecurityScorecard","2022-10-01","2022-10-01","","SecurityScorecard","Partner","https://support.securityscorecard.com/hc/en-us/requests/new","","domains","SecurityScorecardIssueAzureFunctions","SecurityScorecard","SecurityScorecard Issue","SecurityScorecard is the leader in cybersecurity risk ratings. 
The [SecurityScorecard](https://www.SecurityScorecard.com/) Issues data connector provides the ability for Sentinel to import SecurityScorecard issue data as logs. SecurityScorecard provides ratings for over 12 million companies and domains using countless data points from across the internet. Maintain full awareness of any company's security posture and be able to receive timely updates when new cybersecurity issues are discovered.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the SecurityScorecard API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the SecurityScorecard API**\n\n Follow these instructions to create/get a SecurityScorecard API token.\n 1. As an administrator in SecurityScorecard, navigate to My Settings and then Users\n 2. Click '+ Add User'\n 3. In the form, check off 'Check to create a bot user'\n 4. Provide a name for the Bot and provide it with Read Only permission\n 5. Click 'Add User'\n 6. Locate the newly created Bot user\n 7. Click 'create token' in the Bot user's row\n 8. 
Click 'Confirm' and note the API token that has been generated""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the SecurityScorecard Issue data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the SecurityScorecard API Authorization Key(s)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the SecurityScorecard Issue connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SecurityScorecardIssueAPI-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tSecurityScorecard API Key \n\t\tSecurityScorecard Base URL (https://api.securityscorecard.io) \n\t\tDomain \n\t\tPortfolio IDs (Comma separated IDs) \n\t\tSecurityScorecard Issue Table Name (Default: SecurityScorecardIssue) \n\t\tLevel Issue Change (Default: 7) \n\t\tIssue Schedule (Default: 0 0,30 * * * *) \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the SecurityScorecard Issue data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. 
Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-SecurityScorecardIssueAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. SecurityScorecardXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. 
In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tSecurityScorecard API Key \n\t\tSecurityScorecard Base URL (https://api.securityscorecard.io) \n\t\tDomain \n\t\tPortfolio IDs (Comma separated IDs) \n\t\tSecurityScorecard Issue Table Name (Default: SecurityScorecardIssue) \n\t\tLevel Issue Change (Default: 7) \n\t\tIssue Schedule (Default: 0 0,30 * * * *) \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**SecurityScorecard API Key** is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityScorecard%20Cybersecurity%20Ratings/Data%20Connectors/SecurityScorecardIssue/SecurityScorecardIssue_API_FunctionApp.json","true" +"SecurityScorecardRatings_CL","SecurityScorecard Cybersecurity Ratings","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityScorecard%20Cybersecurity%20Ratings","SecurityScorecard","SecurityScorecard","2022-10-01","2022-10-01","","SecurityScorecard","Partner","https://support.securityscorecard.com/hc/en-us/requests/new","","domains","SecurityScorecardRatingsAzureFunctions","SecurityScorecard","SecurityScorecard Cybersecurity Ratings","SecurityScorecard is the leader in cybersecurity risk ratings. The [SecurityScorecard](https://www.SecurityScorecard.com/) data connector provides the ability for Sentinel to import SecurityScorecard ratings as logs. SecurityScorecard provides ratings for over 12 million companies and domains using countless data points from across the internet. Maintain full awareness of any company's security posture and be able to receive timely updates when scores change or drop. SecurityScorecard ratings are updated daily based on evidence collected across the web.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the SecurityScorecard API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. 
Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the SecurityScorecard API**\n\n Follow these instructions to create/get a SecurityScorecard API token.\n 1. As an administrator in SecurityScorecard, navigate to My Settings and then Users\n 2. Click '+ Add User'\n 3. In the form, check off 'Check to create a bot user'\n 4. Provide a name for the Bot and provide it with Read Only permission\n 5. Click 'Add User'\n 6. Locate the newly created Bot user\n 7. Click 'create token' in the Bot user's row\n 8. Click 'Confirm' and note the API token that has been generated""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the SecurityScorecard Ratings data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following) readily available, as well as the SecurityScorecard API Authorization Key(s)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the SecurityScorecard Ratings connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SecurityScorecardRatingsAPI-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tFunction Name \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tSecurityScorecard API Key \n\t\tSecurityScorecard Base URL (https://api.securityscorecard.io) \n\t\tDomain \n\t\tPortfolio IDs (Comma separated IDs) \n\t\tSecurityScorecard Ratings Table Name (Default: SecurityScorecardRatings) \n\t\tLevel Ratings Change (Default: 7) \n\t\tRatings Schedule (Default: 0 45 * * * *) \n\t\tDiff Override Own Ratings (Default: true) \n\t\tDiff Override Portfolio Ratings (Default: true) \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the SecurityScorecard Ratings data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-SecurityScorecardRatingsAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. 
Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. SecurityScorecardXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tSecurityScorecard API Key \n\t\tSecurityScorecard Base URL (https://api.securityscorecard.io) \n\t\tDomain \n\t\tPortfolio IDs (Comma separated IDs) \n\t\tSecurityScorecard Ratings Table Name (Default: SecurityScorecardRatings) \n\t\tLevel Ratings Change (Default: 7) \n\t\tRatings Schedule (Default: 0 45 * * * *) \n\t\tDiff Override Own Ratings (Default: true) \n\t\tDiff Override Portfolio Ratings (Default: true) \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**SecurityScorecard API Key** is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityScorecard%20Cybersecurity%20Ratings/Data%20Connectors/SecurityScorecardRatings/SecurityScorecardRatings_API_FunctionApp.json","true" +"","SecurityThreatEssentialSolution","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SecurityThreatEssentialSolution","azuresentinel","azure-sentinel-solution-securitythreatessentialsol","2022-03-30","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"SecurityEvent","Semperis Directory Services 
Protector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Semperis%20Directory%20Services%20Protector","semperis","directory-services-protector-solution","2021-10-18","","","Semperis","Partner","https://www.semperis.com/contact-us/","","domains","SemperisDSP","SEMPERIS","Semperis Directory Services Protector","Semperis Directory Services Protector data connector allows for the export of its Windows event logs (i.e. Indicators of Exposure and Indicators of Compromise) to Microsoft Sentinel in real time.
It provides a data parser to manipulate the Windows event logs more easily. The different workbooks ease your Active Directory security monitoring and provide different ways to visualize the data. The analytic templates allow you to automate responses regarding different events, exposures, or attacks.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**dsp_parser**](https://aka.ms/sentinel-SemperisDSP-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""**Configure Windows Security Events via AMA connector**"", ""description"": ""Collect Windows security events logs from your **Semperis DSP Management Server** .""}, {""title"": ""1. Install the Azure Monitor Agent (AMA)"", ""description"": ""On your **Semperis DSP Management Server** install the AMA on the DSP machine that will act as the event log forwarder.\nYou can skip this step if you have already installed the Microsoft agent for Windows""}, {""title"": ""2. Create a Data Collection Rule (DCR)"", ""description"": ""Start collecting logs from the **Semperis DSP Management Server** .\n\n1. In the Azure portal, navigate to your **Log Analytics workspace**.\n2. In the left pane, click on **Configuration** and then **Data connectors**.\n3. Find and install the **Windows Security Events via AMA** connector.\n4. Click on **Open connector** and then on **Create data collection rule**.\n5. 
Configure the DCR with the necessary details, such as the log sources and the destination workspace."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Semperis DSP Management Server"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""**Configure Common Event Format via AMA connector**"", ""description"": ""Collect syslog messages sent from your **Semperis DSP Management Server** .""}, {""title"": ""1. Install the Azure Monitor Agent (AMA)"", ""description"": ""Install the AMA on the Linux machine that will act as the log forwarder. This machine will collect and forward CEF logs to Microsoft Sentinel.\nYou can skip this step if you have already installed the Microsoft agent for Linux""}, {""title"": ""2. Create a Data Collection Rule (DCR)"", ""description"": ""Start collecting logs from the **Semperis DSP Management Server** .\n\n1. In the Azure portal, navigate to your **Log Analytics workspace**.\n2. In the left pane, click on **Configuration** and then **Data connectors**.\n3. Find and install the **Common Event Format via AMA** connector.\n4. Click on **Open connector** and then on **Create data collection rule**.\n5. Configure the DCR with the necessary details, such as the log sources and the destination workspace."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Semperis DSP Management Server"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""3. 
Configure sending CEF logs on your Semperis DSP Management Server"", ""description"": ""Configure your **Semperis DSP Management Server** to send CEF logs to the Linux machine where the AMA is installed. This involves setting the destination IP address and port for the CEF logs""}, {""title"": """", ""description"": ""> You should now be able to receive logs in the *Windows event log* table and *common log* table, log data can be parsed using the **dsp_parser()** function, used by all query samples, workbooks and analytic templates.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Semperis%20Directory%20Services%20Protector/Data%20Connectors/SemperisDSP-connector.json","true" +"SenservaPro_CL","SenservaPro","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SenservaPro","senservallc","senservapro4sentinel","2022-06-01","","","Senserva","Partner","https://www.senserva.com/contact/","","domains","SenservaPro","Senserva","SenservaPro (Preview)","The SenservaPro data connector provides a viewing experience for your SenservaPro scanning logs. View dashboards of your data, use queries to hunt & explore, and create custom alerts.","[{""title"": ""1. 
Setup the data connection"", ""description"": ""Visit [Senserva Setup](https://www.senserva.com/senserva-microsoft-sentinel-edition-setup/) for information on setting up the Senserva data connection, support, or any other questions. The Senserva installation will configure a Log Analytics Workspace for output. Deploy Microsoft Sentinel onto the configured Log Analytics Workspace to finish the data connection setup by following [this onboarding guide.](https://docs.microsoft.com/azure/sentinel/quickstart-onboard)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SenservaPro/Data%20Connectors/SenservaPro.json","true" +"SentinelOne_CL","SentinelOne","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne","azuresentinel","azure-sentinel-solution-sentinelone","2024-11-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SentinelOne","SentinelOne","SentinelOne","The [SentinelOne](https://www.sentinelone.com/) data connector provides the capability to ingest common SentinelOne server objects such as Threats, Agents, Applications, Activities, Policies, Groups, and more events into Microsoft Sentinel through the REST API. Refer to API documentation: `https://.sentinelone.net/api-doc/overview` for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the SentinelOne API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias SentinelOne and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne/Parsers/SentinelOne.txt). The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the SentinelOne API**\n\n Follow the instructions to obtain the credentials.\n\n1. Log in to the SentinelOne Management Console with Admin user credentials.\n2. In the Management Console, click **Settings**.\n3. In the **SETTINGS** view, click **USERS**\n4. Click **New User**.\n5. Enter the information for the new console user.\n5. In Role, select **Admin**.\n6. Click **SAVE**\n7. Save credentials of the new user for using in the data connector.""}, {""title"": """", ""description"": ""**NOTE :-** Admin access can be delegated using custom roles. 
Please review SentinelOne [documentation](https://www.sentinelone.com/blog/feature-spotlight-fully-custom-role-based-access-control/) to learn more about custom RBAC.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the SentinelOne data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the SentinelOne Audit data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SentinelOneAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-SentinelOneAPI-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **SentinelOneAPIToken**, **SentinelOneUrl** `(https://.sentinelone.net)` and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the SentinelOne Reports data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. 
Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-SentinelOneAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. SOneXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n 1. In the Function App, select the Function App Name and select **Configuration**.\n\n 2. 
In the **Application settings** tab, select **+ New application setting**.\n\n 3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\t SentinelOneAPIToken\n\t\t SentinelOneUrl\n\t\t WorkspaceID\n\t\t WorkspaceKey\n\t\t logAnalyticsUri (optional)\n\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n\n 4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**SentinelOneAPIToken** is required. 
See the documentation to learn more about API on the `https://.sentinelone.net/api-doc/overview`.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne/Data%20Connectors/SentinelOne_API_FunctionApp.json","true" +"SentinelOneActivities_CL","SentinelOne","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne","azuresentinel","azure-sentinel-solution-sentinelone","2024-11-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SentinelOneCCP","Microsoft","SentinelOne","The [SentinelOne](https://usea1-nessat.sentinelone.net/api-doc/overview) data connector allows ingesting logs from the SentinelOne API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. It uses the SentinelOne API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the SentinelOne API \n Follow the instructions to obtain the credentials. You can also follow the [guide](https://usea1-nessat.sentinelone.net/docs/en/how-to-automate-api-token-generation.html#how-to-automate-api-token-generation) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve SentinelOne Management URL\n 1.1. Log in to the SentinelOne [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**] copy the URL link above without the URL path.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the SentinelOne [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**]\n 2.3. 
In [**Settings**] view click on [**USERS**].\n 2.4. In the [**USERS**] Page click on [**Service Users**] -> [**Actions**] -> [**Create new service user**].\n 2.5. Choose [**Expiration date**] and [**scope**] (by site) and click on [**Create User**].\n 2.6. Once the [**Service User**] is created copy the [**API Token**] from page and press [**Save**]""}}, {""parameters"": {""label"": ""SentinelOne Management URL"", ""placeholder"": ""https://example.sentinelone.net/"", ""type"": ""text"", ""name"": ""managementUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""securestring"", ""name"": ""apitoken""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne/Data%20Connectors/SentinelOne_ccp/connectorDefinition.json","true" +"SentinelOneAgents_CL","SentinelOne","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne","azuresentinel","azure-sentinel-solution-sentinelone","2024-11-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SentinelOneCCP","Microsoft","SentinelOne","The [SentinelOne](https://usea1-nessat.sentinelone.net/api-doc/overview) data connector allows ingesting logs from the SentinelOne API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the SentinelOne API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the SentinelOne API \n Follow the instructions to obtain the credentials. You can also follow the [guide](https://usea1-nessat.sentinelone.net/docs/en/how-to-automate-api-token-generation.html#how-to-automate-api-token-generation) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve SentinelOne Management URL\n 1.1. Log in to the SentinelOne [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**] copy the URL link above without the URL path.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the SentinelOne [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**]\n 2.3. In [**Settings**] view click on [**USERS**].\n 2.4. In the [**USERS**] Page click on [**Service Users**] -> [**Actions**] -> [**Create new service user**].\n 2.5. Choose [**Expiration date**] and [**scope**] (by site) and click on [**Create User**].\n 2.6. 
Once the [**Service User**] is created copy the [**API Token**] from page and press [**Save**]""}}, {""parameters"": {""label"": ""SentinelOne Management URL"", ""placeholder"": ""https://example.sentinelone.net/"", ""type"": ""text"", ""name"": ""managementUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""securestring"", ""name"": ""apitoken""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne/Data%20Connectors/SentinelOne_ccp/connectorDefinition.json","true" +"SentinelOneAlerts_CL","SentinelOne","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne","azuresentinel","azure-sentinel-solution-sentinelone","2024-11-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SentinelOneCCP","Microsoft","SentinelOne","The [SentinelOne](https://usea1-nessat.sentinelone.net/api-doc/overview) data connector allows ingesting logs from the SentinelOne API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the SentinelOne API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the SentinelOne API \n Follow the instructions to obtain the credentials. You can also follow the [guide](https://usea1-nessat.sentinelone.net/docs/en/how-to-automate-api-token-generation.html#how-to-automate-api-token-generation) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve SentinelOne Management URL\n 1.1. Log in to the SentinelOne [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**] copy the URL link above without the URL path.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the SentinelOne [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**]\n 2.3. In [**Settings**] view click on [**USERS**].\n 2.4. In the [**USERS**] Page click on [**Service Users**] -> [**Actions**] -> [**Create new service user**].\n 2.5. Choose [**Expiration date**] and [**scope**] (by site) and click on [**Create User**].\n 2.6. 
Once the [**Service User**] is created copy the [**API Token**] from page and press [**Save**]""}}, {""parameters"": {""label"": ""SentinelOne Management URL"", ""placeholder"": ""https://example.sentinelone.net/"", ""type"": ""text"", ""name"": ""managementUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""securestring"", ""name"": ""apitoken""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne/Data%20Connectors/SentinelOne_ccp/connectorDefinition.json","true" +"SentinelOneGroups_CL","SentinelOne","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne","azuresentinel","azure-sentinel-solution-sentinelone","2024-11-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SentinelOneCCP","Microsoft","SentinelOne","The [SentinelOne](https://usea1-nessat.sentinelone.net/api-doc/overview) data connector allows ingesting logs from the SentinelOne API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the SentinelOne API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the SentinelOne API \n Follow the instructions to obtain the credentials. You can also follow the [guide](https://usea1-nessat.sentinelone.net/docs/en/how-to-automate-api-token-generation.html#how-to-automate-api-token-generation) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve SentinelOne Management URL\n 1.1. Log in to the SentinelOne [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**] copy the URL link above without the URL path.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the SentinelOne [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**]\n 2.3. In [**Settings**] view click on [**USERS**].\n 2.4. In the [**USERS**] Page click on [**Service Users**] -> [**Actions**] -> [**Create new service user**].\n 2.5. Choose [**Expiration date**] and [**scope**] (by site) and click on [**Create User**].\n 2.6. 
Once the [**Service User**] is created copy the [**API Token**] from page and press [**Save**]""}}, {""parameters"": {""label"": ""SentinelOne Management URL"", ""placeholder"": ""https://example.sentinelone.net/"", ""type"": ""text"", ""name"": ""managementUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""securestring"", ""name"": ""apitoken""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne/Data%20Connectors/SentinelOne_ccp/connectorDefinition.json","true" +"SentinelOneThreats_CL","SentinelOne","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne","azuresentinel","azure-sentinel-solution-sentinelone","2024-11-26","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SentinelOneCCP","Microsoft","SentinelOne","The [SentinelOne](https://usea1-nessat.sentinelone.net/api-doc/overview) data connector allows ingesting logs from the SentinelOne API into Microsoft Sentinel. The data connector is built on Microsoft Sentinel Codeless Connector Platform. 
It uses the SentinelOne API to fetch logs and it supports DCR-based [ingestion time transformations](https://docs.microsoft.com/azure/azure-monitor/logs/custom-logs-overview) that parses the received security data into a custom table so that queries don't need to parse it again, thus resulting in better performance.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Configuration steps for the SentinelOne API \n Follow the instructions to obtain the credentials. You can also follow the [guide](https://usea1-nessat.sentinelone.net/docs/en/how-to-automate-api-token-generation.html#how-to-automate-api-token-generation) to generate API key.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Retrieve SentinelOne Management URL\n 1.1. Log in to the SentinelOne [**Management Console**] with Admin user credentials\n 1.2. In the [**Management Console**] copy the URL link above without the URL path.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Retrieve API Token\n 2.1. Log in to the SentinelOne [**Management Console**] with Admin user credentials\n 2.2. In the [**Management Console**], click [**Settings**]\n 2.3. In [**Settings**] view click on [**USERS**].\n 2.4. In the [**USERS**] Page click on [**Service Users**] -> [**Actions**] -> [**Create new service user**].\n 2.5. Choose [**Expiration date**] and [**scope**] (by site) and click on [**Create User**].\n 2.6. 
Once the [**Service User**] is created copy the [**API Token**] from page and press [**Save**]""}}, {""parameters"": {""label"": ""SentinelOne Management URL"", ""placeholder"": ""https://example.sentinelone.net/"", ""type"": ""text"", ""name"": ""managementUrl""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""securestring"", ""name"": ""apitoken""}, ""type"": ""Textbox""}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""innerSteps"": null}]","{""tenant"": null, ""licenses"": null, ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelOne/Data%20Connectors/SentinelOne_ccp/connectorDefinition.json","true" +"","SentinelSOARessentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SentinelSOARessentials","azuresentinel","azure-sentinel-solution-sentinelsoaressentials","2022-06-27","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"SeraphicWebSecurity_CL","SeraphicSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SeraphicSecurity","seraphicalgorithmsltd1616061090462","seraphic-security-sentinel","2023-07-31","2023-07-31","","Seraphic Security","Partner","https://seraphicsecurity.com","","domains","SeraphicWebSecurity","Seraphic","Seraphic Web Security","The Seraphic Web Security data connector provides the capability to ingest [Seraphic Web Security](https://seraphicsecurity.com/) events and alerts into Microsoft Sentinel.","[{""title"": ""Connect Seraphic Web Security"", ""description"": ""Please insert 
the integration name, the Seraphic integration URL and your workspace name for Microsoft Sentinel:"", ""instructions"": [{""parameters"": {""enable"": ""true"", ""userRequestPlaceHoldersInput"": [{""displayText"": ""Integration Name"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{subscriptionId}}""}, {""displayText"": ""Integration URL"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{endpoint}}""}, {""displayText"": ""Workspace Name - Log Analytics"", ""requestObjectKey"": ""apiEndpoint"", ""placeHolderName"": ""{{workspaceName}}""}]}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true, ""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Seraphic API key"", ""description"": ""API key for Microsoft Sentinel connected to your Seraphic Web Security tenant. 
To get this API key for your tenant - [read this documentation](https://constellation.seraphicsecurity.com/integrations/microsoft_sentinel/Guidance/MicrosoftSentinel-IntegrationGuide-230822.pdf).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SeraphicSecurity/Data%20Connectors/SeraphicSecurityConnector.json","true" +"","ServiceNow TISC","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ServiceNow%20TISC","servicenow1594831756316","sentinel-solution-tisc","2025-01-15","2025-01-15","","ServiceNow","Partner","https://support.servicenow.com/now","","domains","","","","","","","","false" +"","Servicenow","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Servicenow","azuresentinel","azure-sentinel-solution-servicenow","2022-09-19","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"Sevco_Devices_CL","SevcoSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SevcoSecurity","azuresentinel","azure-sentinel-solution-sevcosecurity","2023-05-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SevcoDevices","Sevco Security","Sevco Platform - Devices","The Sevco Platform - Devices connector allows you to easily connect your Sevco Device Assets with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization’s assets and improves your security operation capabilities.

[For more information >​](https://docs.sev.co/docs/microsoft-sentinel-inventory)","[{""title"": ""Configure and connect to Sevco"", ""description"": ""The Sevco Platform can integrate with and export assets directly to Microsoft Sentinel..\u200b\n\n1. Go to [Sevco - Microsoft Sentinel Integration](https://docs.sev.co/docs/microsoft-sentinel-inventory), and follow the instructions, using the parameters below to set up the connection:.\n\n"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SevcoSecurity/Data%20Connectors/Connector_SevcoSecurity.json","true" +"","ShadowByte Aria","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ShadowByte%20Aria","shadowbyte1641237427416","ariasent1","2021-12-24","","","Shadowbyte","Partner","https://shadowbyte.com/products/aria/","","domains","","","","","","","","false" +"","Shodan","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Shodan","azuresentinel","azure-sentinel-solution-shodan","2023-02-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"CommonSecurityLog","Silverfort","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Silverfort","silverfort","microsoft-sentinel-solution-silverfort","2024-09-01","","","Silverfort","Partner","https://www.silverfort.com/customer-success/#support","","domains","SilverfortAma","Silverfort","Silverfort Admin Console","The [Silverfort](https://silverfort.com) ITDR Admin Console connector solution allows ingestion of Silverfort events and logging into Microsoft Sentinel.
Silverfort provides syslog based events and logging using Common Event Format (CEF). By forwarding your Silverfort ITDR Admin Console CEF data into Microsoft Sentinel, you can take advantage of Sentinels's search & correlation, alerting, and threat intelligence enrichment on Silverfort data.
Please contact Silverfort or consult the Silverfort documentation for more information.","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Silverfort/Data%20Connectors/SilverfortAma.json","true" +"SlackAuditNativePoller_CL","SlackAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlackAudit","azuresentinel","azure-sentinel-solution-slackaudit","2021-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SlackAudit","Slack","Slack","The [Slack](https://slack.com) data connector provides the capability to ingest [Slack Audit Records](https://api.slack.com/admins/audit-logs) events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://api.slack.com/admins/audit-logs#the_audit_event) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more. This data connector uses Microsoft Sentinel native polling capability.","[{""title"": ""Connect Slack to Microsoft Sentinel"", ""description"": ""Enable Slack audit Logs."", ""instructions"": [{""parameters"": {""enable"": ""true""}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Slack API credentials"", ""description"": ""**SlackAPIBearerToken** is required for REST API. [See the documentation to learn more about API](https://api.slack.com/web#authentication). Check all [requirements and follow the instructions](https://api.slack.com/web#authentication) for obtaining credentials.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlackAudit/Data%20Connectors/SlackNativePollerConnector/azuredeploy_Slack_native_poller_connector.json","true" +"SlackAudit_CL","SlackAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlackAudit","azuresentinel","azure-sentinel-solution-slackaudit","2021-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SlackAuditAPI","Slack","[DEPRECATED] Slack Audit","The [Slack](https://slack.com) Audit data connector provides the capability to ingest [Slack Audit Records](https://api.slack.com/admins/audit-logs) events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://api.slack.com/admins/audit-logs#the_audit_event) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Slack REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-SlackAuditAPI-parser) to create the Kusto functions alias, **SlackAudit**""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Slack API**\n\n [Follow the instructions](https://api.slack.com/web#authentication) to obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Slack Audit data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Slack Audit data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SlackAuditAPI-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **SlackAPIBearerToken** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Slack Audit data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentinel-SlackAuditAPI-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select ** New application setting**.\n4. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tSlackAPIBearerToken\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**SlackAPIBearerToken** is required for REST API. [See the documentation to learn more about API](https://api.slack.com/web#authentication). 
Check all [requirements and follow the instructions](https://api.slack.com/web#authentication) for obtaining credentials.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlackAudit/Data%20Connectors/SlackAudit_API_FunctionApp.json","true" +"SlackAuditV2_CL","SlackAudit","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlackAudit","azuresentinel","azure-sentinel-solution-slackaudit","2021-03-24","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SlackAuditLogsCCPDefinition","Microsoft","SlackAudit (via Codeless Connector Framework)","The SlackAudit data connector provides the capability to ingest [Slack Audit logs](https://api.slack.com/admins/audit-logs) into Microsoft Sentinel through the REST API. Refer to [API documentation](https://api.slack.com/admins/audit-logs-call) for more information.","[{""description"": ""To ingest data from SlackAudit to Microsoft Sentinel, you have to click on Add Domain button below then you get a pop up to fill the details, provide the required information and click on Connect. 
You can see the usernames, actions connected in the grid.\n>"", ""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""UserName"", ""columnValue"": ""properties.addOnAttributes.UserName""}, {""columnName"": ""Actions"", ""columnValue"": ""properties.addOnAttributes.Actions""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add domain"", ""title"": ""Add domain"", ""subtitle"": ""Add domain"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""UserName"", ""placeholder"": ""Enter your User Name"", ""name"": ""UserName"", ""type"": ""text"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""SlackAudit API Key"", ""placeholder"": ""Enter your API KEY"", ""name"": ""apiKey"", ""type"": ""password"", ""required"": true}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""SlackAudit Action Type"", ""placeholder"": ""Enter the Action Type"", ""name"": ""action"", ""type"": ""string"", ""required"": true}}]}]}}], ""title"": ""Connect SlackAudit to Microsoft Sentinel\n\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true, ""action"": false}}], ""customs"": [{""name"": ""UserName, SlackAudit API Key & Action Type"", ""description"": ""To Generate the Access Token, create a new application in Slack, then add necessary scopes and configure the redirect URL. 
For detailed instructions on generating the access token, user name and action name limit, refer the [link](https://github.com/v-gsrihitha/v-gsrihitha/blob/main/SlackAudit/Readme.md).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlackAudit/Data%20Connectors/SlackAuditLog_CCP/SlackAuditLog_ConnectorDefinition.json","true" +"","SlashNext","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlashNext","slashnext1598548183597","slashnext-weblog-assessment-for-microsoft-sentinel","2022-08-12","2022-08-12","","SlashNext","Partner","https://support@slashnext.com","","domains","","","","","","","","false" +"","SlashNext SIEM","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SlashNext%20SIEM","slashnext1598548183597","slashnext-security-events-for-microsoft-sentinel","2023-05-26","2023-06-16","","SlashNext","Partner","https://slashnext.com/support","","domains","","","","","","","","false" +"Snowflake_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeDataConnector","Snowflake","[DEPRECATED] Snowflake","The Snowflake data connector provides the capability to ingest Snowflake [login logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history.html) and [query logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history.html) into Microsoft Sentinel using the Snowflake Python Connector. Refer to [Snowflake documentation](https://docs.snowflake.com/en/user-guide/python-connector.html) for more information.

NOTE: This data connector has been deprecated, consider moving to the CCF data connector available in the solution which replaces ingestion via the deprecated HTTP Data Collector API.

","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Azure Blob Storage API to pull logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**Snowflake**](https://aka.ms/sentinel-SnowflakeDataConnector-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Creating user in Snowflake**\n\nTo query data from Snowflake you need a user that is assigned to a role with sufficient privileges and a virtual warehouse cluster. The initial size of this cluster will be set to small but if it is insufficient, the cluster size can be increased as necessary.\n\n1. Enter the Snowflake console.\n2. Switch role to SECURITYADMIN and [create a new role](https://docs.snowflake.com/en/sql-reference/sql/create-role.html):\n```\nUSE ROLE SECURITYADMIN;\nCREATE OR REPLACE ROLE EXAMPLE_ROLE_NAME;```\n3. 
Switch role to SYSADMIN and [create warehouse](https://docs.snowflake.com/en/sql-reference/sql/create-warehouse.html) and [grand access](https://docs.snowflake.com/en/sql-reference/sql/grant-privilege.html) to it:\n```\nUSE ROLE SYSADMIN;\nCREATE OR REPLACE WAREHOUSE EXAMPLE_WAREHOUSE_NAME\n WAREHOUSE_SIZE = 'SMALL' \n AUTO_SUSPEND = 5\n AUTO_RESUME = true\n INITIALLY_SUSPENDED = true;\nGRANT USAGE, OPERATE ON WAREHOUSE EXAMPLE_WAREHOUSE_NAME TO ROLE EXAMPLE_ROLE_NAME;```\n4. Switch role to SECURITYADMIN and [create a new user](https://docs.snowflake.com/en/sql-reference/sql/create-user.html):\n```\nUSE ROLE SECURITYADMIN;\nCREATE OR REPLACE USER EXAMPLE_USER_NAME\n PASSWORD = 'example_password'\n DEFAULT_ROLE = EXAMPLE_ROLE_NAME\n DEFAULT_WAREHOUSE = EXAMPLE_WAREHOUSE_NAME\n;```\n5. Switch role to ACCOUNTADMIN and [grant access to snowflake database](https://docs.snowflake.com/en/sql-reference/account-usage.html#enabling-account-usage-for-other-roles) for role.\n```\nUSE ROLE ACCOUNTADMIN;\nGRANT IMPORTED PRIVILEGES ON DATABASE SNOWFLAKE TO ROLE EXAMPLE_ROLE_NAME;```\n6. 
Switch role to SECURITYADMIN and [assign role](https://docs.snowflake.com/en/sql-reference/sql/grant-role.html) to user:\n```\nUSE ROLE SECURITYADMIN;\nGRANT ROLE EXAMPLE_ROLE_NAME TO USER EXAMPLE_USER_NAME;```\n\n>**IMPORTANT:** Save user and API password created during this step as they will be used during deployment step.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as Snowflake credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SnowflakeDataConnector-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Snowflake Account Identifier**, **Snowflake User**, **Snowflake Password**, **Microsoft Sentinel Workspace Id**, **Microsoft Sentinel Shared Key**\n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**.\n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""1. Download the [Azure Function App](https://aka.ms/sentinel-SnowflakeDataConnector-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration. \n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **+ New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tSNOWFLAKE_ACCOUNT\n\t\tSNOWFLAKE_USER\n\t\tSNOWFLAKE_PASSWORD\n\t\tWORKSPACE_ID\n\t\tSHARED_KEY\n\t\tlogAnalyticsUri (optional)\n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://WORKSPACE_ID.ods.opinsights.azure.us`. \n4. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Snowflake Credentials"", ""description"": ""**Snowflake Account Identifier**, **Snowflake User** and **Snowflake Password** are required for connection. See the documentation to learn more about [Snowflake Account Identifier](https://docs.snowflake.com/en/user-guide/admin-account-identifier.html#). 
Instructions on how to create user for this connector you can find below.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/Snowflake_API_FunctionApp.json","true" +"SnowflakeLoad_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. 
Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","[{""title"": ""Connect Snowflake to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** To ensure data is presented in separate columns for each field, execute the parser using the **Snowflake()** function""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Snowflake, you need to provide the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Account Identifier \n To gather data from Snowflake, you'll need Snowflake Account Identifier.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Programmatic Access Token \n To gather data from Snowflake, you'll need the Snowflake Programmatic Access Token""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""For detailed instructions on retrieving the Account Identifier and Programmatic Access Token, please refer to the [Connector Tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/Readme.md).""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Account-Identifier"", ""columnValue"": ""properties.addOnAttributes.AccountId""}, {""columnName"": ""Table Name"", ""columnValue"": ""properties.dataType""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake Account Identifier"", ""placeholder"": ""Enter Snowflake Account Identifier"", ""type"": ""text"", ""name"": ""accountId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": 
""Snowflake PAT"", ""placeholder"": ""Enter Snowflake PAT"", ""type"": ""password"", ""name"": ""apikey"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" +"SnowflakeLogin_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users 
Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","[{""title"": ""Connect Snowflake to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** To ensure data is presented in separate columns for each field, execute the parser using the **Snowflake()** function""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Snowflake, you need to provide the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Account Identifier \n To gather data from Snowflake, you'll need Snowflake Account Identifier.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Programmatic Access Token \n To gather data from Snowflake, you'll need the Snowflake Programmatic Access Token""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""For detailed instructions on retrieving the Account Identifier and Programmatic Access Token, please refer to the [Connector Tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/Readme.md).""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Account-Identifier"", ""columnValue"": ""properties.addOnAttributes.AccountId""}, {""columnName"": ""Table Name"", ""columnValue"": ""properties.dataType""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake Account Identifier"", ""placeholder"": ""Enter Snowflake Account Identifier"", ""type"": 
""text"", ""name"": ""accountId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake PAT"", ""placeholder"": ""Enter Snowflake PAT"", ""type"": ""password"", ""name"": ""apikey"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" +"SnowflakeMaterializedView_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics 
Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","[{""title"": ""Connect Snowflake to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** To ensure data is presented in separate columns for each field, execute the parser using the **Snowflake()** function""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Snowflake, you need to provide the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Account Identifier \n To gather data from Snowflake, you'll need Snowflake Account Identifier.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Programmatic Access Token \n To gather data from Snowflake, you'll need the Snowflake Programmatic Access Token""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""For detailed instructions on retrieving the Account Identifier and Programmatic Access Token, please refer to the [Connector Tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/Readme.md).""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Account-Identifier"", ""columnValue"": ""properties.addOnAttributes.AccountId""}, {""columnName"": ""Table Name"", ""columnValue"": ""properties.dataType""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake 
Account Identifier"", ""placeholder"": ""Enter Snowflake Account Identifier"", ""type"": ""text"", ""name"": ""accountId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake PAT"", ""placeholder"": ""Enter Snowflake PAT"", ""type"": ""password"", ""name"": ""apikey"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" +"SnowflakeQuery_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables 
Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","[{""title"": ""Connect Snowflake to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** To ensure data is presented in separate columns for each field, execute the parser using the **Snowflake()** function""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Snowflake, you need to provide the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Account Identifier \n To gather data from Snowflake, you'll need Snowflake Account Identifier.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Programmatic Access Token \n To gather data from Snowflake, you'll need the Snowflake Programmatic Access Token""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""For detailed instructions on retrieving the Account Identifier and Programmatic Access Token, please refer to the [Connector Tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/Readme.md).""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Account-Identifier"", ""columnValue"": ""properties.addOnAttributes.AccountId""}, {""columnName"": ""Table Name"", ""columnValue"": ""properties.dataType""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake Account Identifier"", ""placeholder"": ""Enter Snowflake Account Identifier"", ""type"": ""text"", ""name"": ""accountId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake PAT"", ""placeholder"": ""Enter Snowflake PAT"", ""type"": ""password"", ""name"": ""apikey"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" 
+"SnowflakeRoleGrant_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. 
Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","[{""title"": ""Connect Snowflake to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** To ensure data is presented in separate columns for each field, execute the parser using the **Snowflake()** function""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Snowflake, you need to provide the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Account Identifier \n To gather data from Snowflake, you'll need Snowflake Account Identifier.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Programmatic Access Token \n To gather data from Snowflake, you'll need the Snowflake Programmatic Access Token""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""For detailed instructions on retrieving the Account Identifier and Programmatic Access Token, please refer to the [Connector Tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/Readme.md).""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Account-Identifier"", ""columnValue"": ""properties.addOnAttributes.AccountId""}, {""columnName"": ""Table Name"", ""columnValue"": ""properties.dataType""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake Account Identifier"", ""placeholder"": ""Enter Snowflake Account Identifier"", ""type"": ""text"", ""name"": ""accountId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": 
""Snowflake PAT"", ""placeholder"": ""Enter Snowflake PAT"", ""type"": ""password"", ""name"": ""apikey"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" +"SnowflakeRoles_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users 
Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","[{""title"": ""Connect Snowflake to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** To ensure data is presented in separate columns for each field, execute the parser using the **Snowflake()** function""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Snowflake, you need to provide the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Account Identifier \n To gather data from Snowflake, you'll need Snowflake Account Identifier.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Programmatic Access Token \n To gather data from Snowflake, you'll need the Snowflake Programmatic Access Token""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""For detailed instructions on retrieving the Account Identifier and Programmatic Access Token, please refer to the [Connector Tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/Readme.md).""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Account-Identifier"", ""columnValue"": ""properties.addOnAttributes.AccountId""}, {""columnName"": ""Table Name"", ""columnValue"": ""properties.dataType""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake Account Identifier"", ""placeholder"": ""Enter Snowflake Account Identifier"", ""type"": 
""text"", ""name"": ""accountId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake PAT"", ""placeholder"": ""Enter Snowflake PAT"", ""type"": ""password"", ""name"": ""apikey"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" +"SnowflakeTableStorageMetrics_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics 
Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","[{""title"": ""Connect Snowflake to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** To ensure data is presented in separate columns for each field, execute the parser using the **Snowflake()** function""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Snowflake, you need to provide the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Account Identifier \n To gather data from Snowflake, you'll need Snowflake Account Identifier.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Programmatic Access Token \n To gather data from Snowflake, you'll need the Snowflake Programmatic Access Token""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""For detailed instructions on retrieving the Account Identifier and Programmatic Access Token, please refer to the [Connector Tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/Readme.md).""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Account-Identifier"", ""columnValue"": ""properties.addOnAttributes.AccountId""}, {""columnName"": ""Table Name"", ""columnValue"": ""properties.dataType""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake 
Account Identifier"", ""placeholder"": ""Enter Snowflake Account Identifier"", ""type"": ""text"", ""name"": ""accountId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake PAT"", ""placeholder"": ""Enter Snowflake PAT"", ""type"": ""password"", ""name"": ""apikey"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" +"SnowflakeTables_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables 
Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","[{""title"": ""Connect Snowflake to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** To ensure data is presented in separate columns for each field, execute the parser using the **Snowflake()** function""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Snowflake, you need to provide the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Account Identifier \n To gather data from Snowflake, you'll need Snowflake Account Identifier.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Programmatic Access Token \n To gather data from Snowflake, you'll need the Snowflake Programmatic Access Token""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""For detailed instructions on retrieving the Account Identifier and Programmatic Access Token, please refer to the [Connector Tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/Readme.md).""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Account-Identifier"", ""columnValue"": ""properties.addOnAttributes.AccountId""}, {""columnName"": ""Table Name"", ""columnValue"": ""properties.dataType""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake Account Identifier"", ""placeholder"": ""Enter Snowflake Account Identifier"", ""type"": ""text"", ""name"": ""accountId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake PAT"", ""placeholder"": ""Enter Snowflake PAT"", ""type"": ""password"", ""name"": ""apikey"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" 
+"SnowflakeUserGrant_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. 
Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","[{""title"": ""Connect Snowflake to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** To ensure data is presented in separate columns for each field, execute the parser using the **Snowflake()** function""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Snowflake, you need to provide the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Account Identifier \n To gather data from Snowflake, you'll need Snowflake Account Identifier.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Programmatic Access Token \n To gather data from Snowflake, you'll need the Snowflake Programmatic Access Token""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""For detailed instructions on retrieving the Account Identifier and Programmatic Access Token, please refer to the [Connector Tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/Readme.md).""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Account-Identifier"", ""columnValue"": ""properties.addOnAttributes.AccountId""}, {""columnName"": ""Table Name"", ""columnValue"": ""properties.dataType""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake Account Identifier"", ""placeholder"": ""Enter Snowflake Account Identifier"", ""type"": ""text"", ""name"": ""accountId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": 
""Snowflake PAT"", ""placeholder"": ""Enter Snowflake PAT"", ""type"": ""password"", ""name"": ""apikey"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" +"SnowflakeUsers_CL","Snowflake","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake","azuresentinel","azure-sentinel-solution-snowflake","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SnowflakeLogsCCPDefinition","Microsoft","Snowflake (via Codeless Connector Framework)","The Snowflake data connector provides the capability to ingest Snowflake [Login History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/login_history), [Query History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/query_history), [User-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_users), [Role-Grant Logs](https://docs.snowflake.com/en/sql-reference/account-usage/grants_to_roles), [Load History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/load_history), [Materialized View Refresh History Logs](https://docs.snowflake.com/en/sql-reference/account-usage/materialized_view_refresh_history), [Roles Logs](https://docs.snowflake.com/en/sql-reference/account-usage/roles), [Tables Logs](https://docs.snowflake.com/en/sql-reference/account-usage/tables), [Table Storage Metrics Logs](https://docs.snowflake.com/en/sql-reference/account-usage/table_storage_metrics), [Users 
Logs](https://docs.snowflake.com/en/sql-reference/account-usage/users) into Microsoft Sentinel using the Snowflake SQL API. Refer to [Snowflake SQL API documentation](https://docs.snowflake.com/en/developer-guide/sql-api/reference) for more information.","[{""title"": ""Connect Snowflake to Microsoft Sentinel"", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": "">**NOTE:** To ensure data is presented in separate columns for each field, execute the parser using the **Snowflake()** function""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""To gather data from Snowflake, you need to provide the following resources""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. Account Identifier \n To gather data from Snowflake, you'll need Snowflake Account Identifier.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Programmatic Access Token \n To gather data from Snowflake, you'll need the Snowflake Programmatic Access Token""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""For detailed instructions on retrieving the Account Identifier and Programmatic Access Token, please refer to the [Connector Tutorial](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/Readme.md).""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnName"": ""Account-Identifier"", ""columnValue"": ""properties.addOnAttributes.AccountId""}, {""columnName"": ""Table Name"", ""columnValue"": ""properties.dataType""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""isPrimary"": true, ""label"": ""Add Account"", ""title"": ""Add Account"", ""subtitle"": ""Add Account"", ""contextPaneType"": ""DataConnectorsContextPane"", ""instructionSteps"": [{""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake Account Identifier"", ""placeholder"": ""Enter Snowflake Account Identifier"", ""type"": 
""text"", ""name"": ""accountId"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Snowflake PAT"", ""placeholder"": ""Enter Snowflake PAT"", ""type"": ""password"", ""name"": ""apikey"", ""validations"": {""required"": true}}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true, ""action"": false}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Snowflake/Data%20Connectors/SnowflakeLogs_ccp/SnowflakeLogs_ConnectorDefinition.json","true" +"CommonSecurityLog","SonicWall Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SonicWall%20Firewall","sonicwall-inc","sonicwall-networksecurity-azure-sentinal","2022-05-06","","","SonicWall","Partner","https://www.sonicwall.com/support/","","domains","SonicWallFirewall","SonicWall","[Deprecated] SonicWall Firewall via Legacy Agent","Common Event Format (CEF) is an industry standard format on top of Syslog messages, used by SonicWall to allow event interoperability among different platforms. By connecting your CEF logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward SonicWall Firewall Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your SonicWall Firewall to send Syslog messages in CEF format to the proxy machine. Make sure you send the logs to port 514 TCP on the machine's IP address.\n\n Follow Instructions . Then Make sure you select local use 4 as the facility. Then select ArcSight as the Syslog format.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SonicWall%20Firewall/Data%20Connectors/SonicwallFirewall.json","true" +"CommonSecurityLog","SonicWall Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SonicWall%20Firewall","sonicwall-inc","sonicwall-networksecurity-azure-sentinal","2022-05-06","","","SonicWall","Partner","https://www.sonicwall.com/support/","","domains","SonicWallFirewallAma","SonicWall","[Deprecated] SonicWall Firewall via AMA","Common Event Format (CEF) is an industry standard format on top of Syslog messages, used by SonicWall to allow event interoperability among different platforms. By connecting your CEF logs to Microsoft Sentinel, you can take advantage of search & correlation, alerting, and threat intelligence enrichment for each log.","[{""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. 
Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine""}, {""title"": ""Step B. Forward SonicWall Firewall Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set your SonicWall Firewall to send Syslog messages in CEF format to the proxy machine. Make sure you send the logs to port 514 TCP on the machine's IP address.\n\n Follow Instructions . Then Make sure you select local use 4 as the facility. Then select ArcSight as the Syslog format.""}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SonicWall%20Firewall/Data%20Connectors/template_SonicwallFirewallAMA.json","true" +"Sonrai_Tickets_CL","SonraiSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SonraiSecurity","sonraisecurityllc1584373214489","sonrai_sentinel_offer","2021-10-18","","","Sonrai","Partner","","","domains","SonraiDataConnector","Sonrai","Sonrai Data Connector","Use this data connector to integrate with Sonrai Security and get Sonrai tickets sent directly to Microsoft Sentinel.","[{""title"": ""Sonrai Security Data Connector"", ""description"": ""1. Navigate to Sonrai Security dashboard.\n2. On the bottom left panel, click on integrations.\n3. Select Microsoft Sentinel from the list of available Integrations.\n4. Fill in the form using the information provided below."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SonraiSecurity/Data%20Connectors/Connector_REST_API_Sonrai.json","true" +"SophosCloudOptix_CL","Sophos Cloud Optix","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20Cloud%20Optix","sophos","sophos_cloud_optix_mss","2022-05-02","","","Sophos","Partner","https://www.sophos.com/en-us/support","","domains","SophosCloudOptix","Sophos","Sophos Cloud Optix","The [Sophos Cloud Optix](https://www.sophos.com/products/cloud-optix.aspx) connector allows you to easily connect your Sophos Cloud Optix logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's cloud security and compliance posture and improves your cloud security operation capabilities.","[{""title"": ""1. Get the Workspace ID and the Primary Key"", ""description"": ""Copy the Workspace ID and Primary Key for your workspace.\n"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""2. Configure the Sophos Cloud Optix Integration"", ""description"": ""In Sophos Cloud Optix go to [Settings->Integrations->Microsoft Sentinel](https://optix.sophos.com/#/integrations/sentinel) and enter the Workspace ID and Primary Key copied in Step 1.\n""}, {""title"": ""3. Select Alert Levels"", ""description"": ""In Alert Levels, select which Sophos Cloud Optix alerts you want to send to Microsoft Sentinel.\n""}, {""title"": ""4. 
Turn on the integration"", ""description"": ""To turn on the integration, select Enable, and then click Save.\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20Cloud%20Optix/Data%20Connectors/Connector_REST_API_SophosCloudOptix.json","true" +"SophosEP_CL","Sophos Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-sophosep","2021-07-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","SophosEP","Sophos","Sophos Endpoint Protection","The [Sophos Endpoint Protection](https://www.sophos.com/en-us/products/endpoint-antivirus.aspx) data connector provides the capability to ingest [Sophos events](https://docs.sophos.com/central/Customer/help/en-us/central/Customer/common/concepts/Events.html) into Microsoft Sentinel. Refer to [Sophos Central Admin documentation](https://docs.sophos.com/central/Customer/help/en-us/central/Customer/concepts/Logs.html) for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Sophos Central APIs to pull its logs into Microsoft Sentinel. 
This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**SophosEPEvent**](https://aka.ms/sentinel-SophosEP-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Sophos Central API**\n\n Follow the instructions to obtain the credentials.\n\n1. In Sophos Central Admin, go to **Global Settings > API Token Management**.\n2. To create a new token, click **Add token** from the top-right corner of the screen.\n3. Select a **token name** and click **Save**. The **API Token Summary** for this token is displayed.\n4. 
Click **Copy** to copy your **API Access URL + Headers** from the **API Token Summary** section into your clipboard.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Sophos Endpoint Protection data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Sophos Endpoint Protection data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-SophosEP-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **Sophos API Access URL and Headers**, **AzureSentinelWorkspaceId**, **AzureSentinelSharedKey**. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Sophos Endpoint Protection data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-SophosEP-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n3. In the **Application settings** tab, select **New application setting**.\n4. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tSOPHOS_TOKEN\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n5. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**API token** is required. 
[See the documentation to learn more about API token](https://docs.sophos.com/central/Customer/help/en-us/central/Customer/concepts/ep_ApiTokenManagement.html)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20Endpoint%20Protection/Data%20Connectors/SophosEP_API_FunctionApp.json","true" +"SophosEPAlerts_CL","Sophos Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-sophosep","2021-07-07","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","SophosEndpointProtectionCCPDefinition","Microsoft","Sophos Endpoint Protection (using REST API)","The [Sophos Endpoint Protection](https://www.sophos.com/en-us/products/endpoint-antivirus.aspx) data connector provides the capability to ingest [Sophos events](https://developer.sophos.com/docs/siem-v1/1/routes/events/get) and [Sophos alerts](https://developer.sophos.com/docs/siem-v1/1/routes/alerts/get) into Microsoft Sentinel. Refer to [Sophos Central Admin documentation](https://docs.sophos.com/central/Customer/help/en-us/central/Customer/concepts/Logs.html) for more information.","[{""description"": ""Follow [Sophos instructions](https://developer.sophos.com/getting-started-tenant) to create a service principal with access to the Sophos API. 
It will need the Service Principal ReadOnly role.\n Through those instructions, you should get the Client ID, Client Secret, Tenant ID and data region.\n Fill the form below with that information."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Sophos Tenant ID"", ""placeholder"": ""Sophos Tenant ID"", ""type"": ""text"", ""name"": ""sophosTenantId""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Sophos Tenant Data Region"", ""placeholder"": ""eu01, eu02, us01, us02 or us03"", ""type"": ""text"", ""name"": ""sophosRegion""}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""name"", ""columnName"": ""Name""}, {""columnValue"": ""id"", ""columnName"": ""ID""}]}}], ""title"": ""Connect to Sophos Endpoint Protection API to start collecting event and alert logs in Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Sophos Endpoint Protection API access"", ""description"": ""Access to the Sophos Endpoint Protection API through a service principal is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20Endpoint%20Protection/Data%20Connectors/SophosEP_ccp/SophosEP_DataConnectorDefinition.json","true" +"SophosEPEvents_CL","Sophos Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-sophosep","2021-07-07","","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com/","","domains","SophosEndpointProtectionCCPDefinition","Microsoft","Sophos Endpoint Protection (using REST API)","The [Sophos Endpoint Protection](https://www.sophos.com/en-us/products/endpoint-antivirus.aspx) data connector provides the capability to ingest [Sophos events](https://developer.sophos.com/docs/siem-v1/1/routes/events/get) and [Sophos alerts](https://developer.sophos.com/docs/siem-v1/1/routes/alerts/get) into Microsoft Sentinel. Refer to [Sophos Central Admin documentation](https://docs.sophos.com/central/Customer/help/en-us/central/Customer/concepts/Logs.html) for more information.","[{""description"": ""Follow [Sophos instructions](https://developer.sophos.com/getting-started-tenant) to create a service principal with access to the Sophos API. It will need the Service Principal ReadOnly role.\n Through those instructions, you should get the Client ID, Client Secret, Tenant ID and data region.\n Fill the form below with that information."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Sophos Tenant ID"", ""placeholder"": ""Sophos Tenant ID"", ""type"": ""text"", ""name"": ""sophosTenantId""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Sophos Tenant Data Region"", ""placeholder"": ""eu01, eu02, us01, us02 or us03"", ""type"": ""text"", ""name"": ""sophosRegion""}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}, {""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""name"", ""columnName"": ""Name""}, {""columnValue"": ""id"", ""columnName"": ""ID""}]}}], ""title"": ""Connect to Sophos Endpoint Protection API to start collecting event and alert logs in Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", 
""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Sophos Endpoint Protection API access"", ""description"": ""Access to the Sophos Endpoint Protection API through a service principal is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20Endpoint%20Protection/Data%20Connectors/SophosEP_ccp/SophosEP_DataConnectorDefinition.json","true" +"Syslog","Sophos XG Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20XG%20Firewall","azuresentinel","azure-sentinel-solution-sophosxgfirewall","2021-10-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","SophosXGFirewall","Sophos","[Deprecated] Sophos XG Firewall","The [Sophos XG Firewall](https://www.sophos.com/products/next-gen-firewall.aspx) allows you to easily connect your Sophos XG Firewall logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigations. Integrating Sophos XG Firewall with Microsoft Sentinel provides more visibility into your organization's firewall traffic and will enhance security monitoring capabilities.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Sophos XG Firewall and load the function code or click [here](https://aka.ms/sentinel-SophosXG-parser), on the second line of the query, enter the hostname(s) of your Sophos XG Firewall device(s) and any other unique identifiers for the logstream. 
The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n 2. Select **Apply below configuration to my machines** and select the facilities and severities.\n 3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Configure and connect the Sophos XG Firewall"", ""description"": ""[Follow these instructions](https://doc.sophos.com/nsg/sophos-firewall/20.0/Help/en-us/webhelp/onlinehelp/AdministratorHelp/SystemServices/LogSettings/SyslogServerAdd/index.html) to enable syslog streaming. 
Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Sophos XG Firewall"", ""description"": ""must be configured to export logs via Syslog""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Sophos%20XG%20Firewall/Data%20Connectors/Connector_Syslog_SophosXGFirewall.json","true" +"","SpyCloud Enterprise Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SpyCloud%20Enterprise%20Protection","spycloudinc1680448518850","azure-sentinel-solution-spycloudenterprise","2023-09-09","","","Spycloud","Partner","https://portal.spycloud.com","","domains","","","","","","","","false" +"secRMM_CL","Squadra Technologies SecRmm","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Squadra%20Technologies%20SecRmm","squadratechnologies","squadratechnologies.secrmmsentinel","2022-05-09","","","Squadra Technologies","Partner","https://www.squadratechnologies.com/Contact.aspx","","domains","SquadraTechnologiesSecRMM","Squadra Technologies","Squadra Technologies secRMM","Use the Squadra Technologies secRMM Data Connector to push USB removable storage security event data into Microsoft Sentinel Log Analytics.","[{""title"": """", ""description"": ""Follow the step-by-step instructions provided in the [Squadra Technologies configuration guide for Microsoft Sentinel](https://www.squadratechnologies.com/StaticContent/ProductDownload/secRMM/9.11.0.0/secRMMAzureSentinelAdministratorGuide.pdf)"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], 
""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Squadra%20Technologies%20SecRmm/Data%20Connectors/SquadraTechnologiesSecRMM.json","true" +"SquidProxy_CL","SquidProxy","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SquidProxy","azuresentinel","azure-sentinel-solution-squidproxy","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","SquidProxy","Squid","[Deprecated] Squid Proxy","The [Squid Proxy](http://www.squid-cache.org/) connector allows you to easily connect your Squid Proxy logs with Microsoft Sentinel. This gives you more insight into your organization's network proxy traffic and improves your security operation capabilities.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Squid Proxy and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SquidProxy/Parsers/SquidProxy.txt), on the second line of the query, enter the hostname(s) of your SquidProxy device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Squid Proxy server where the logs are generated.\n\n> Logs from Squid Proxy deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": 
""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the custom log directory to be collected"", ""instructions"": [{""parameters"": {""linkType"": ""OpenCustomLogsSettings""}, ""type"": ""InstallAgent""}]}, {""title"": """", ""description"": ""1. Select the link above to open your workspace advanced settings \n2. From the left pane, select **Data**, select **Custom Logs** and click **Add+**\n3. Click **Browse** to upload a sample of a Squid Proxy log file(e.g. access.log or cache.log). Then, click **Next >**\n4. Select **New line** as the record delimiter and click **Next >**\n5. Select **Windows** or **Linux** and enter the path to Squid Proxy logs. Default paths are: \n - **Windows** directory: `C:\\Squid\\var\\log\\squid\\*.log`\n - **Linux** Directory: `/var/log/squid/*.log` \n6. After entering the path, click the '+' symbol to apply, then click **Next >** \n7. Add **SquidProxy_CL** as the custom log Name and click **Done**""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SquidProxy/Data%20Connectors/Connector_CustomLog_SquidProxy.json","true" +"StyxViewAlerts_CL","Styx Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Styx%20Intelligence","styx_intelligence","microsoft-sentinel-solution-styxintelligence","2025-02-07","","","Styx Intelligence","Partner","https://www.styxintel.com/contact-us/","","domains","StyxViewEndpointConnectorDefinition","Styx Intelligence","StyxView Alerts (via Codeless Connector Platform)","The [StyxView Alerts](https://styxintel.com/) data connector enables seamless integration between the StyxView Alerts platform and Microsoft Sentinel. This connector ingests alert data from the StyxView Alerts API, allowing organizations to centralize and correlate actionable threat intelligence directly within their Microsoft Sentinel workspace.","[{""description"": ""Contact Styx Intelligence Support (support.team@styxintel.com) to get access to an API key."", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""API Token"", ""placeholder"": ""API Token"", ""type"": ""password"", ""name"": ""APIKey""}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Connect to StyxView Alerts API to start collecting alert logs in Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""StyxView Alert 
API access"", ""description"": ""Access to the StyxView Alerts API through an API key is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Styx%20Intelligence/Data%20Connectors/Alerts/StyxView%20Alerts_ConnectorDefinition.json","true" +"Syslog","Symantec Endpoint Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Symantec%20Endpoint%20Protection","azuresentinel","azure-sentinel-solution-symantecendpointprotection","2022-07-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SymantecEndpointProtection","Broadcom","[Deprecated] Symantec Endpoint Protection","The [Broadcom Symantec Endpoint Protection (SEP)](https://www.broadcom.com/products/cyber-security/endpoint/end-user/enterprise) connector allows you to easily connect your SEP logs with Microsoft Sentinel. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Symantec Endpoint Protection and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Symantec%20Endpoint%20Protection/Parsers/SymantecEndpointProtection.yaml), on the second line of the query, enter the hostname(s) of your SymantecEndpointProtection device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n 2. Select **Apply below configuration to my machines** and select the facilities and severities.\n 3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. Configure and connect the Symantec Endpoint Protection"", ""description"": ""[Follow these instructions](https://techdocs.broadcom.com/us/en/symantec-security-software/endpoint-security-and-management/endpoint-protection/all/Monitoring-Reporting-and-Enforcing-Compliance/viewing-logs-v7522439-d37e464/exporting-data-to-a-syslog-server-v8442743-d15e1107.html) to configure the Symantec Endpoint Protection to forward syslog. 
Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Symantec Endpoint Protection (SEP)"", ""description"": ""must be configured to export logs via Syslog""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Symantec%20Endpoint%20Protection/Data%20Connectors/Connector_Syslog_SymantecEndpointProtection.json","true" +"","Symantec Integrated Cyber Defense","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Symantec%20Integrated%20Cyber%20Defense","azuresentinel","symantec_icdx_mss","2022-06-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"Syslog","Symantec VIP","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Symantec%20VIP","azuresentinel","azure-sentinel-solution-symantecvip","2022-05-16","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SymantecVIP","Symantec","[Deprecated] Symantec VIP","The [Symantec VIP](https://vip.symantec.com/) connector allows you to easily connect your Symantec VIP logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's network and improves your security operation capabilities.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Symantec VIP and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Symantec%20VIP/Parsers/SymantecVIP.yaml), on the second line of the query, enter the hostname(s) of your Symantec VIP device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n 2. Select **Apply below configuration to my machines** and select the facilities and severities.\n 3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. 
Configure and connect the Symantec VIP"", ""description"": ""[Follow these instructions](https://aka.ms/sentinel-symantecvip-configurationsteps) to configure the Symantec VIP Enterprise Gateway to forward syslog. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Symantec VIP"", ""description"": ""must be configured to export logs via Syslog""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Symantec%20VIP/Data%20Connectors/Connector_Syslog_SymantecVIP.json","true" +"Syslog","SymantecProxySG","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SymantecProxySG","azuresentinel","azure-sentinel-symantec-proxysg","2021-05-25","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","SymantecProxySG","Symantec","[Deprecated] Symantec ProxySG","The [Symantec ProxySG](https://www.broadcom.com/products/cyber-security/network/gateway/proxy-sg-and-advanced-secure-gateway) allows you to easily connect your Symantec ProxySG logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigations. Integrating Symantec ProxySG with Microsoft Sentinel provides more visibility into your organization's network proxy traffic and will enhance security monitoring capabilities.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Symantec Proxy SG and load the function code or click [here](https://aka.ms/sentinel-SymantecProxySG-parser), on the second line of the query, enter the hostname(s) of your Symantec Proxy SG device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n 2. Select **Apply below configuration to my machines** and select the facilities and severities.\n 3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. 
Configure and connect the Symantec ProxySG"", ""description"": "" \n 1. Log in to the Blue Coat Management Console .\n 2. Select Configuration > Access Logging > Formats.\n 3. Select New.\n 4. Enter a unique name in the Format Name field.\n 5. Click the radio button for **Custom format string** and paste the following string into the field.\n

1 $(date) $(time) $(time-taken) $(c-ip) $(cs-userdn) $(cs-auth-groups) $(x-exception-id) $(sc-filter-result) $(cs-categories) $(quot)$(cs(Referer))$(quot) $(sc-status) $(s-action) $(cs-method) $(quot)$(rs(Content-Type))$(quot) $(cs-uri-scheme) $(cs-host) $(cs-uri-port) $(cs-uri-path) $(cs-uri-query) $(cs-uri-extension) $(quot)$(cs(User-Agent))$(quot) $(s-ip) $(sr-bytes) $(rs-bytes) $(x-virus-id) $(x-bluecoat-application-name) $(x-bluecoat-application-operation) $(cs-uri-port) $(x-cs-client-ip-country) $(cs-threat-risk)

\n 6. Click the **OK** button. \n 7. Click the **Apply** button. \n 8. [Follow these instructions](https://knowledge.broadcom.com/external/article/166529/sending-access-logs-to-a-syslog-server.html) to enable syslog streaming of **Access** Logs. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Symantec ProxySG"", ""description"": ""must be configured to export logs via Syslog""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/SymantecProxySG/Data%20Connectors/Connector_Syslog_SymantecProxySG.json","true" +"","Synack","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Synack","","","","","","","","","","","","","","","","","","false" +"Syslog","Syslog","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Syslog","azuresentinel","azure-sentinel-solution-syslog","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","Syslog","Microsoft","Syslog via Legacy Agent","Syslog is an event logging protocol that is common to Linux. Applications will send messages that may be stored on the local machine or delivered to a Syslog collector. When the Agent for Linux is installed, it configures the local Syslog daemon to forward messages to the agent. The agent then sends the message to the workspace.

[Learn more >](https://aka.ms/sysLogInfo)","[{""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""You can collect Syslog events from your local machine by installing the agent on it. You can also collect Syslog generated on a different source by running the installation script below on the local machine, where the agent is installed.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n\n1. Select the link below to open your workspace **agents configuration**, and select the **Syslog** tab.\n2. Select **Add facility** and choose from the drop-down list of facilities. Repeat for all the facilities you want to add.\n3. Mark the check boxes for the desired severities for each facility.\n4. 
Click **Apply**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Syslog/Data%20Connectors/template_Syslog.json","true" +"Syslog","Syslog","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Syslog","azuresentinel","azure-sentinel-solution-syslog","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","SyslogAma","Microsoft","Syslog via AMA","Syslog is an event logging protocol that is common to Linux. Applications will send messages that may be stored on the local machine or delivered to a Syslog collector. When the Agent for Linux is installed, it configures the local Syslog daemon to forward messages to the agent. The agent then sends the message to the workspace.

[Learn more >](https://aka.ms/sysLogInfo)","[{""title"": ""Enable data collection rule\u200b"", ""description"": ""You can collect Syslog events from your local machine by installing the agent on it. You can also collect Syslog generated on a different source by running the installation script below on the local machine, where the agent is installed.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""type"": ""SysLogAma""}]}, {""instructions"": [{""parameters"": {""linkType"": ""OpenCreateDataCollectionRule"", ""dataCollectionRuleType"": 6}, ""type"": ""InstallAgent""}]}, {""title"": ""Run the following command to install and apply the Syslog collector:"", ""description"": ""> To collect logs generated on a different machine run this script on the machine where the agent is installed."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces/datasources"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace data sources"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Syslog/Data%20Connectors/template_SyslogAma.json","true" +"Talon_CL","Talon","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Talon","taloncybersecurityltd1654088115170","talonconnector","2023-01-25","","","Talon Security","Partner","https://docs.console.talon-sec.com/","","domains","TalonLogs","Talon Security","Talon Insights","The Talon Security Logs connector allows you to easily connect your Talon events and audit logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation.","[{""title"": """", ""description"": ""Please note the values below and follow the instructions here to connect your Talon Security events and audit logs with Microsoft Sentinel."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Talon/Data%20Connectors/TalonLogs.json","true" +"","Tanium","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tanium","taniuminc1646329360287","tanium_sentinel_connector","2022-05-16","2025-07-03","","Tanium Inc.","Partner","https://support.tanium.com","","domains","","","","","","","","false" +"Cymru_Scout_Account_Usage_Data_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. 
Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indicators in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. \n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. 
**Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" +"Cymru_Scout_Domain_Data_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indicators in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. 
\n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" +"Cymru_Scout_IP_Data_Communications_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. 
Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indicators in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. \n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. 
**Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" +"Cymru_Scout_IP_Data_Details_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indicators in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. 
\n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" +"Cymru_Scout_IP_Data_Fingerprints_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. 
Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indicators in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. \n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. 
**Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" +"Cymru_Scout_IP_Data_Foundation_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indicators in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. 
\n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" +"Cymru_Scout_IP_Data_OpenPorts_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. 
Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indicators in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. \n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. 
**Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" +"Cymru_Scout_IP_Data_PDNS_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indicators in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. 
\n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" +"Cymru_Scout_IP_Data_Summary_Certs_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. 
Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indicators in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. \n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. 
**Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" +"Cymru_Scout_IP_Data_Summary_Details_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indictaors in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. 
\n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" +"Cymru_Scout_IP_Data_Summary_Fingerprints_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. 
Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indicators in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. \n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. 
**Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" +"Cymru_Scout_IP_Data_Summary_OpenPorts_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indicators in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. 
\n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" +"Cymru_Scout_IP_Data_Summary_PDNS_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. 
Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indicators in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. \n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. 
**Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" +"Cymru_Scout_IP_Data_x509_CL","Team Cymru Scout","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout","team-cymru","teamcymruscout_sentinel","2024-07-16","2025-05-16","","Team Cymru","Partner","http://team-cymru.com","","domains","TeamCymruScout","Team Cymru Scout","Team Cymru Scout Data Connector","The [TeamCymruScout](https://scout.cymru.com/) Data Connector allows users to bring Team Cymru Scout IP, domain and account usage data in Microsoft Sentinel for enrichment.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Team Cymru Scout API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Steps to Create Team Cymru Scout API Key**\n\n Follow these instructions to create a Team Cymru Scout API Key.\n 1. Refer to the [API Keys](https://scout.cymru.com/docs/api#api-keys) document to generate an API key to use as an alternate form of authorization.\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TeamCymruScout Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TeamCymruScout Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TeamCymruScout Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Upload csv with indicators in Watchlist**\n\n Follow the steps in this section to upload csv containing indicators in watchlist:\n 1. In the Azure portal, Go to **Microsoft Sentinel** and select your workspace.\n 2. Go to **Watchlist** under **Configuration** section from left panel.\n 3. Click on **TeamCymruScoutDomainData**, and then select **Bulk update** from **Update watchlist**.\n 4. Upload your csv files with domain indicators in **Upload file** input and click on **Next: Review+Create**.\n 5. Once validation is successful, click on **Update**.\n 6. Follow the same steps to update *TeamCymruScoutIPData* watchlist for ip indicators. 
\n\n> **Reference link:** [Bulk update a watchlist](https://learn.microsoft.com/en-us/azure/sentinel/watchlists-manage#bulk-update-a-watchlist)""}, {""title"": """", ""description"": ""**STEP 7 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TeamCymruScout data connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TeamCymruScout-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tLocation \n\t\tWorkspaceName \n\t\tFunction Name \n\t\tTeamCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tUsername \n\t\tPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAzureClientId \n\t\tAzureClientSecret \n\t\tTenantId \n\t\tAzureEntraObjectId \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TeamCymruScout data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TeamCymruScout310-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. 
Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. CymruScoutXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12 or above.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. 
Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tCymruScoutBaseURL \n\t\tAuthenticationType \n\t\tTeamCymruScoutUsername \n\t\tTeamCymruScoutPassword \n\t\tAPIKey \n\t\tIPValues \n\t\tDomainValues \n\t\tAPIType \n\t\tAZURE_CLIENT_ID \n\t\tAZURE_CLIENT_SECRET \n\t\tAZURE_TENANT_ID \n\t\tIPTableName \n\t\tDomainTableName \n\t\tAccountUsageTableName \n\t\tSchedule \n\t\tAccountUsageSchedule \n\t\tLogLevel \n\t\tAZURE_DATA_COLLECTION_ENDPOINT \n\t\tAZURE_DATA_COLLECTION_RULE_ID_MAIN_TABLES \n\t\tAZURE_DATA_COLLECTION_RULE_ID_SUB_TABLES\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Permission to assign a role to the registered application"", ""description"": ""Permission to assign a role to the registered application in Microsoft Entra ID is required.""}, {""name"": ""Team Cymru Scout Credentials/permissions"", ""description"": ""Team Cymru Scout account credentials(Username, Password) is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Team%20Cymru%20Scout/Data%20Connectors/TeamCymruScout/TeamCymruScout_API_FunctionApp.json","true" +"","Teams","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Teams","sentinel4teams","sentinelforteams","2022-02-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"","Templates","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Templates","","","","","","","","","","","","","","","","","","false" +"Tenable_IE_CL","Tenable App","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App","tenable","tenable-sentinel-integration","2024-06-06","2025-06-19","","Tenable","Partner","https://www.tenable.com/support/technical-support","","domains","TenableIE","Tenable","Tenable Identity Exposure","Tenable Identity Exposure connector allows Indicators of Exposure, Indicators of Attack and trailflow logs to be ingested into Microsoft Sentinel.The different work books and data parsers allow you to more easily manipulate logs and monitor your Active Directory environment. The analytic templates allow you to automate responses regarding different events, exposures and attacks.","[{""title"": """", ""description"": "">This data connector depends on [afad_parser](https://aka.ms/sentinel-TenableApp-afad-parser) based on a Kusto Function to work as expected which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. 
Configure the Syslog server"", ""description"": ""You will first need a **linux Syslog** server that TenableIE will send logs to. Typically you can run **rsyslog** on **Ubuntu**.\n You can then configure this server as you wish, but it is recommended to be able to output TenableIE logs in a separate file.\n\nConfigure rsyslog to accept logs from your TenableIE IP address.:\n\n```shell\nsudo -i\n\n# Set TenableIE source IP address\nexport TENABLE_IE_IP={Enter your IP address}\n\n# Create rsyslog configuration file\ncat > /etc/rsyslog.d/80-tenable.conf << EOF\n\\$ModLoad imudp\n\\$UDPServerRun 514\n\\$ModLoad imtcp\n\\$InputTCPServerRun 514\n\\$AllowedSender TCP, 127.0.0.1, $TENABLE_IE_IP\n\\$AllowedSender UDP, 127.0.0.1, $TENABLE_IE_IP\n\\$template MsgTemplate,\""%TIMESTAMP:::date-rfc3339% %HOSTNAME% %programname%[%procid%]:%msg%\\n\""\n\\$template remote-incoming-logs, \""/var/log/%PROGRAMNAME%.log\""\n*.* ?remote-incoming-logs;MsgTemplate\nEOF\n\n# Restart rsyslog\nsystemctl restart rsyslog\n```""}, {""title"": ""2. Install and onboard the Microsoft agent for Linux"", ""description"": ""The OMS agent will receive the TenableIE syslog events and publish it in Microsoft Sentinel :"", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""3. 
Check agent logs on the Syslog server"", ""description"": ""```shell\ntail -f /var/opt/microsoft/omsagent/log/omsagent.log\n```""}, {""title"": ""4. Configure TenableIE to send logs to your Syslog server"", ""description"": ""On your **TenableIE** portal, go to *System*, *Configuration* and then *Syslog*.\nFrom there you can create a new Syslog alert toward your Syslog server.\n\nOnce this is done, check that the logs are correctly gathered on your server in a separate file (to do this, you can use the *Test the configuration* button in the Syslog alert configuration in TenableIE).\nIf you used the Quickstart template, the Syslog server will by default listen on port 514 in UDP and 1514 in TCP, without TLS.""}, {""title"": ""5. Configure the custom logs"", ""description"": ""Configure the agent to collect the logs.\n\n1. In Microsoft Sentinel, go to **Configuration** -> **Settings** -> **Workspace settings** -> **Custom logs**.\n2. Click **Add custom log**.\n3. Upload a sample TenableIE.log Syslog file from the **Linux** machine running the **Syslog** server and click **Next**\n4. Set the record delimiter to **New Line** if not already the case and click **Next**.\n5. Select **Linux** and enter the file path to the **Syslog** file, click **+** then **Next**. The default location of the file is `/var/log/TenableIE.log` if you have a Tenable version <3.1.0, you must also add this linux file location `/var/log/AlsidForAD.log`.\n6. Set the **Name** to *Tenable_IE_CL* (Azure automatically adds *_CL* at the end of the name, there must be only one, make sure the name is not *Tenable_IE_CL_CL*).\n7. Click **Next**, you will see a resume, then click **Create**\n"", ""instructions"": []}, {""title"": ""6. 
Enjoy !"", ""description"": ""> You should now be able to receive logs in the *Tenable_IE_CL* table, logs data can be parse using the **afad_parser()** function, used by all query samples, workbooks and analytic templates.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Access to TenableIE Configuration"", ""description"": ""Permissions to configure syslog alerting engine""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App/Data%20Connectors/TenableIE/TenableIE.json","true" +"Tenable_VM_Asset_CL","Tenable App","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App","tenable","tenable-sentinel-integration","2024-06-06","2025-06-19","","Tenable","Partner","https://www.tenable.com/support/technical-support","","domains","TenableVM","Tenable","Tenable Vulnerability Management","The TVM data connector provides the ability to ingest Asset, Vulnerability, Compliance, WAS assets and WAS vulnerabilities data into Microsoft Sentinel using TVM REST APIs. Refer to [API documentation](https://developer.tenable.com/reference) for more information. 
The connector provides the ability to get data which helps to examine potential security risks, get insight into your computing assets, diagnose configuration problems and more","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Durable Functions to connect to the TenableVM API to pull [assets](https://developer.tenable.com/reference#exports-assets-download-chunk), [vulnerabilities](https://developer.tenable.com/reference#exports-vulns-request-export) and [compliance](https://developer.tenable.com/reference#exports-compliance-request-export)(if selected) at a regular interval into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a [**TenableVM parser for vulnerabilities**](https://aka.ms/sentinel-TenableApp-TenableVMVulnerabilities-parser) and a [**TenableVM parser for assets**](https://aka.ms/sentinel-TenableApp-TenableVMAssets-parser) based on a Kusto Function to work as expected which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for TenableVM**\n\n [Follow the instructions](https://docs.tenable.com/vulnerability-management/Content/Settings/my-account/GenerateAPIKey.htm) to obtain the required API credentials. 
\n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TenableVM Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TenableVM Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TenableVM Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function App**\n\n""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TenableVM Vulnerability Management Report data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TenableVM-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-TenableVM-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group**, **FunctionApp Name** and **Location**. \n3. Enter the below information : \n\n\t a. **WorkspaceName** - Enter the Workspace Name of the log analytics Workspace. \n\n\t b. **TenableAccessKey** - Enter Access key for using the Tenable API. \n\n\t c. **TenableSecretKey** - Enter Tenable Secret Key for Authentication. \n\n\t d. **AzureClientID** - Enter Azure Client ID. \n\n\t e. **AzureClientSecret** - Enter Azure Client Secret. \n\n\t f. 
**TenantID** - Enter Tenant ID got from above steps. \n\n\t g. **AzureEntraObjectId** - Enter Azure Object ID got from above steps. \n\n\t h. **LowestSeveritytoStore** - Lowest vulnerability severity to store. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t i. **ComplianceDataIngestion** - Select true if you want to enable Compliance data ingestion from Tenable VM. Default is false. \n\n\t j. **WASAssetDataIngestion** - Select true if you want to enable WAS Asset data ingestion from Tenable VM. Default is false. \n\n\t k. **WASVulnerabilityDataIngestion** - Select true if you want to enable WAS Vulnerability data ingestion from Tenable VM. Default is false. \n\n\t l. **LowestSeveritytoStoreWAS** - The Lowest Vulnerability severity to store for WAS. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t m. **TenableExportScheduleInMinutes** - Schedule in minutes to create new export job from Tenable VM. Default is 1440. \n\n\t n. **AssetTableName** - Enter name of the table used to store Asset Data logs. \n\n\t o. **VulnTableName** - Enter name of the table used to store Vulnerability Data logs. \n\n\t p. **ComplianceTableName** - Enter name of the table used to store Compliance Data logs. \n\n\t q. **WASAssetTableName** - Enter name of the table used to store WAS Asset Data logs. \n\n\t r. **WASVulnTableName** - Enter name of the table used to store WAS Vulnerability Data logs. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TenableVM Vulnerability Management Report data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. 
Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TenableVMAzureSentinelConnector310Updated-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. TenableVMXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. 
In the **Application settings** tab, select **New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\n\t a. **WorkspaceName** - Enter the Workspace Name of the log analytics Workspace. \n\n\t b. **TenableAccessKey** - Enter Access key for using the Tenable API. \n\n\t c. **TenableSecretKey** - Enter Tenable Secret Key for Authentication. \n\n\t d. **AzureClientID** - Enter Azure Client ID. \n\n\t e. **AzureClientSecret** - Enter Azure Client Secret. \n\n\t f. **TenantID** - Enter Tenant ID got from above steps. \n\n\t g. **AzureEntraObjectId** - Enter Azure Object ID got from above steps. \n\n\t h. **LowestSeveritytoStore** - Lowest vulnerability severity to store. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t i. **ComplianceDataIngestion** - Select true if you want to enable Compliance data ingestion from Tenable VM. Default is false. \n\n\t j. **WASAssetDataIngestion** - Select true if you want to enable WAS Asset data ingestion from Tenable VM. Default is false. \n\n\t k. **WASVulnerabilityDataIngestion** - Select true if you want to enable WAS Vulnerability data ingestion from Tenable VM. Default is false. \n\n\t l. **LowestSeveritytoStoreWAS** - The Lowest Vulnerability severity to store for WAS. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t m. **TenableExportScheduleInMinutes** - Schedule in minutes to create new export job from Tenable VM. Default is 1440. \n\n\t n. **AssetTableName** - Enter name of the table used to store Asset Data logs. \n\n\t o. **VulnTableName** - Enter name of the table used to store Vulnerability Data logs. \n\n\t p. **ComplianceTableName** - Enter name of the table used to store Compliance Data logs. \n\n\t q. **WASAssetTableName** - Enter name of the table used to store WAS Asset Data logs. \n\n\t r. 
**WASVulnTableName** - Enter name of the table used to store WAS Vulnerability Data logs. \n\n\t s. **PyTenableUAVendor** - Value must be set to **Microsoft**. \n\n\t t. **PyTenableUAProduct** - Value must be set to **Azure Sentinel**. \n\n\t u. **PyTenableUABuild** - Value must be set to **0.0.1**.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""Both a **TenableAccessKey** and a **TenableSecretKey** is required to access the Tenable REST API. [See the documentation to learn more about API](https://developer.tenable.com/reference#vulnerability-management). 
Check all [requirements and follow the instructions](https://docs.tenable.com/vulnerability-management/Content/Settings/my-account/GenerateAPIKey.htm) for obtaining credentials.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App/Data%20Connectors/TenableVM/TenableVM.json","true" +"Tenable_VM_Compliance_CL","Tenable App","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App","tenable","tenable-sentinel-integration","2024-06-06","2025-06-19","","Tenable","Partner","https://www.tenable.com/support/technical-support","","domains","TenableVM","Tenable","Tenable Vulnerability Management","The TVM data connector provides the ability to ingest Asset, Vulnerability, Compliance, WAS assets and WAS vulnerabilities data into Microsoft Sentinel using TVM REST APIs. Refer to [API documentation](https://developer.tenable.com/reference) for more information. The connector provides the ability to get data which helps to examine potential security risks, get insight into your computing assets, diagnose configuration problems and more","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Durable Functions to connect to the TenableVM API to pull [assets](https://developer.tenable.com/reference#exports-assets-download-chunk), [vulnerabilities](https://developer.tenable.com/reference#exports-vulns-request-export) and [compliance](https://developer.tenable.com/reference#exports-compliance-request-export)(if selected) at a regular interval into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a [**TenableVM parser for vulnerabilities**](https://aka.ms/sentinel-TenableApp-TenableVMVulnerabilities-parser) and a [**TenableVM parser for assets**](https://aka.ms/sentinel-TenableApp-TenableVMAssets-parser) based on a Kusto Function to work as expected which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for TenableVM**\n\n [Follow the instructions](https://docs.tenable.com/vulnerability-management/Content/Settings/my-account/GenerateAPIKey.htm) to obtain the required API credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TenableVM Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TenableVM Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TenableVM Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function App**\n\n""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TenableVM Vulnerability Management Report data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TenableVM-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-TenableVM-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group**, **FunctionApp Name** and **Location**. \n3. Enter the below information : \n\n\t a. **WorkspaceName** - Enter the Workspace Name of the log analytics Workspace. \n\n\t b. **TenableAccessKey** - Enter Access key for using the Tenable API. \n\n\t c. **TenableSecretKey** - Enter Tenable Secret Key for Authentication. \n\n\t d. **AzureClientID** - Enter Azure Client ID. \n\n\t e. **AzureClientSecret** - Enter Azure Client Secret. \n\n\t f. **TenantID** - Enter Tenant ID got from above steps. \n\n\t g. **AzureEntraObjectId** - Enter Azure Object ID got from above steps. \n\n\t h. **LowestSeveritytoStore** - Lowest vulnerability severity to store. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t i. **ComplianceDataIngestion** - Select true if you want to enable Compliance data ingestion from Tenable VM. Default is false. \n\n\t j. **WASAssetDataIngestion** - Select true if you want to enable WAS Asset data ingestion from Tenable VM. Default is false. \n\n\t k. **WASVulnerabilityDataIngestion** - Select true if you want to enable WAS Vulnerability data ingestion from Tenable VM. Default is false. \n\n\t l. 
**LowestSeveritytoStoreWAS** - The Lowest Vulnerability severity to store for WAS. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t m. **TenableExportScheduleInMinutes** - Schedule in minutes to create new export job from Tenable VM. Default is 1440. \n\n\t n. **AssetTableName** - Enter name of the table used to store Asset Data logs. \n\n\t o. **VulnTableName** - Enter name of the table used to store Vulnerability Data logs. \n\n\t p. **ComplianceTableName** - Enter name of the table used to store Compliance Data logs. \n\n\t q. **WASAssetTableName** - Enter name of the table used to store WAS Asset Data logs. \n\n\t r. **WASVulnTableName** - Enter name of the table used to store WAS Vulnerability Data logs. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TenableVM Vulnerability Management Report data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TenableVMAzureSentinelConnector310Updated-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. 
Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. TenableVMXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\n\t a. **WorkspaceName** - Enter the Workspace Name of the log analytics Workspace. \n\n\t b. **TenableAccessKey** - Enter Access key for using the Tenable API. \n\n\t c. **TenableSecretKey** - Enter Tenable Secret Key for Authentication. \n\n\t d. **AzureClientID** - Enter Azure Client ID. \n\n\t e. **AzureClientSecret** - Enter Azure Client Secret. \n\n\t f. 
**TenantID** - Enter Tenant ID got from above steps. \n\n\t g. **AzureEntraObjectId** - Enter Azure Object ID got from above steps. \n\n\t h. **LowestSeveritytoStore** - Lowest vulnerability severity to store. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t i. **ComplianceDataIngestion** - Select true if you want to enable Compliance data ingestion from Tenable VM. Default is false. \n\n\t j. **WASAssetDataIngestion** - Select true if you want to enable WAS Asset data ingestion from Tenable VM. Default is false. \n\n\t k. **WASVulnerabilityDataIngestion** - Select true if you want to enable WAS Vulnerability data ingestion from Tenable VM. Default is false. \n\n\t l. **LowestSeveritytoStoreWAS** - The Lowest Vulnerability severity to store for WAS. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t m. **TenableExportScheduleInMinutes** - Schedule in minutes to create new export job from Tenable VM. Default is 1440. \n\n\t n. **AssetTableName** - Enter name of the table used to store Asset Data logs. \n\n\t o. **VulnTableName** - Enter name of the table used to store Vulnerability Data logs. \n\n\t p. **ComplianceTableName** - Enter name of the table used to store Compliance Data logs. \n\n\t q. **WASAssetTableName** - Enter name of the table used to store WAS Asset Data logs. \n\n\t r. **WASVulnTableName** - Enter name of the table used to store WAS Vulnerability Data logs. \n\n\t s. **PyTenableUAVendor** - Value must be set to **Microsoft**. \n\n\t t. **PyTenableUAProduct** - Value must be set to **Azure Sentinel**. \n\n\t u. **PyTenableUABuild** - Value must be set to **0.0.1**.\n3. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""Both a **TenableAccessKey** and a **TenableSecretKey** is required to access the Tenable REST API. [See the documentation to learn more about API](https://developer.tenable.com/reference#vulnerability-management). 
Check all [requirements and follow the instructions](https://docs.tenable.com/vulnerability-management/Content/Settings/my-account/GenerateAPIKey.htm) for obtaining credentials.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App/Data%20Connectors/TenableVM/TenableVM.json","true" +"Tenable_VM_Vuln_CL","Tenable App","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App","tenable","tenable-sentinel-integration","2024-06-06","2025-06-19","","Tenable","Partner","https://www.tenable.com/support/technical-support","","domains","TenableVM","Tenable","Tenable Vulnerability Management","The TVM data connector provides the ability to ingest Asset, Vulnerability, Compliance, WAS assets and WAS vulnerabilities data into Microsoft Sentinel using TVM REST APIs. Refer to [API documentation](https://developer.tenable.com/reference) for more information. The connector provides the ability to get data which helps to examine potential security risks, get insight into your computing assets, diagnose configuration problems and more","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Durable Functions to connect to the TenableVM API to pull [assets](https://developer.tenable.com/reference#exports-assets-download-chunk), [vulnerabilities](https://developer.tenable.com/reference#exports-vulns-request-export) and [compliance](https://developer.tenable.com/reference#exports-compliance-request-export)(if selected) at a regular interval into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a [**TenableVM parser for vulnerabilities**](https://aka.ms/sentinel-TenableApp-TenableVMVulnerabilities-parser) and a [**TenableVM parser for assets**](https://aka.ms/sentinel-TenableApp-TenableVMAssets-parser) based on a Kusto Function to work as expected which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for TenableVM**\n\n [Follow the instructions](https://docs.tenable.com/vulnerability-management/Content/Settings/my-account/GenerateAPIKey.htm) to obtain the required API credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TenableVM Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TenableVM Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TenableVM Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function App**\n\n""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TenableVM Vulnerability Management Report data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TenableVM-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-TenableVM-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group**, **FunctionApp Name** and **Location**. \n3. Enter the below information : \n\n\t a. **WorkspaceName** - Enter the Workspace Name of the log analytics Workspace. \n\n\t b. **TenableAccessKey** - Enter Access key for using the Tenable API. \n\n\t c. **TenableSecretKey** - Enter Tenable Secret Key for Authentication. \n\n\t d. **AzureClientID** - Enter Azure Client ID. \n\n\t e. **AzureClientSecret** - Enter Azure Client Secret. \n\n\t f. **TenantID** - Enter Tenant ID got from above steps. \n\n\t g. **AzureEntraObjectId** - Enter Azure Object ID got from above steps. \n\n\t h. **LowestSeveritytoStore** - Lowest vulnerability severity to store. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t i. **ComplianceDataIngestion** - Select true if you want to enable Compliance data ingestion from Tenable VM. Default is false. \n\n\t j. **WASAssetDataIngestion** - Select true if you want to enable WAS Asset data ingestion from Tenable VM. Default is false. \n\n\t k. **WASVulnerabilityDataIngestion** - Select true if you want to enable WAS Vulnerability data ingestion from Tenable VM. Default is false. \n\n\t l. 
**LowestSeveritytoStoreWAS** - The Lowest Vulnerability severity to store for WAS. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t m. **TenableExportScheduleInMinutes** - Schedule in minutes to create new export job from Tenable VM. Default is 1440. \n\n\t n. **AssetTableName** - Enter name of the table used to store Asset Data logs. \n\n\t o. **VulnTableName** - Enter name of the table used to store Vulnerability Data logs. \n\n\t p. **ComplianceTableName** - Enter name of the table used to store Compliance Data logs. \n\n\t q. **WASAssetTableName** - Enter name of the table used to store WAS Asset Data logs. \n\n\t r. **WASVulnTableName** - Enter name of the table used to store WAS Vulnerability Data logs. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TenableVM Vulnerability Management Report data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TenableVMAzureSentinelConnector310Updated-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. 
Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. TenableVMXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\n\t a. **WorkspaceName** - Enter the Workspace Name of the log analytics Workspace. \n\n\t b. **TenableAccessKey** - Enter Access key for using the Tenable API. \n\n\t c. **TenableSecretKey** - Enter Tenable Secret Key for Authentication. \n\n\t d. **AzureClientID** - Enter Azure Client ID. \n\n\t e. **AzureClientSecret** - Enter Azure Client Secret. \n\n\t f. 
**TenantID** - Enter Tenant ID got from above steps. \n\n\t g. **AzureEntraObjectId** - Enter Azure Object ID got from above steps. \n\n\t h. **LowestSeveritytoStore** - Lowest vulnerability severity to store. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t i. **ComplianceDataIngestion** - Select true if you want to enable Compliance data ingestion from Tenable VM. Default is false. \n\n\t j. **WASAssetDataIngestion** - Select true if you want to enable WAS Asset data ingestion from Tenable VM. Default is false. \n\n\t k. **WASVulnerabilityDataIngestion** - Select true if you want to enable WAS Vulnerability data ingestion from Tenable VM. Default is false. \n\n\t l. **LowestSeveritytoStoreWAS** - The Lowest Vulnerability severity to store for WAS. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t m. **TenableExportScheduleInMinutes** - Schedule in minutes to create new export job from Tenable VM. Default is 1440. \n\n\t n. **AssetTableName** - Enter name of the table used to store Asset Data logs. \n\n\t o. **VulnTableName** - Enter name of the table used to store Vulnerability Data logs. \n\n\t p. **ComplianceTableName** - Enter name of the table used to store Compliance Data logs. \n\n\t q. **WASAssetTableName** - Enter name of the table used to store WAS Asset Data logs. \n\n\t r. **WASVulnTableName** - Enter name of the table used to store WAS Vulnerability Data logs. \n\n\t s. **PyTenableUAVendor** - Value must be set to **Microsoft**. \n\n\t t. **PyTenableUAProduct** - Value must be set to **Azure Sentinel**. \n\n\t u. **PyTenableUABuild** - Value must be set to **0.0.1**.\n3. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""Both a **TenableAccessKey** and a **TenableSecretKey** is required to access the Tenable REST API. [See the documentation to learn more about API](https://developer.tenable.com/reference#vulnerability-management). 
Check all [requirements and follow the instructions](https://docs.tenable.com/vulnerability-management/Content/Settings/my-account/GenerateAPIKey.htm) for obtaining credentials.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App/Data%20Connectors/TenableVM/TenableVM.json","true" +"Tenable_WAS_Asset_CL","Tenable App","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App","tenable","tenable-sentinel-integration","2024-06-06","2025-06-19","","Tenable","Partner","https://www.tenable.com/support/technical-support","","domains","TenableVM","Tenable","Tenable Vulnerability Management","The TVM data connector provides the ability to ingest Asset, Vulnerability, Compliance, WAS assets and WAS vulnerabilities data into Microsoft Sentinel using TVM REST APIs. Refer to [API documentation](https://developer.tenable.com/reference) for more information. The connector provides the ability to get data which helps to examine potential security risks, get insight into your computing assets, diagnose configuration problems and more","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Durable Functions to connect to the TenableVM API to pull [assets](https://developer.tenable.com/reference#exports-assets-download-chunk), [vulnerabilities](https://developer.tenable.com/reference#exports-vulns-request-export) and [compliance](https://developer.tenable.com/reference#exports-compliance-request-export)(if selected) at a regular interval into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a [**TenableVM parser for vulnerabilities**](https://aka.ms/sentinel-TenableApp-TenableVMVulnerabilities-parser) and a [**TenableVM parser for assets**](https://aka.ms/sentinel-TenableApp-TenableVMAssets-parser) based on a Kusto Function to work as expected which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for TenableVM**\n\n [Follow the instructions](https://docs.tenable.com/vulnerability-management/Content/Settings/my-account/GenerateAPIKey.htm) to obtain the required API credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TenableVM Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TenableVM Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TenableVM Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function App**\n\n""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TenableVM Vulnerability Management Report data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TenableVM-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-TenableVM-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group**, **FunctionApp Name** and **Location**. \n3. Enter the below information : \n\n\t a. **WorkspaceName** - Enter the Workspace Name of the log analytics Workspace. \n\n\t b. **TenableAccessKey** - Enter Access key for using the Tenable API. \n\n\t c. **TenableSecretKey** - Enter Tenable Secret Key for Authentication. \n\n\t d. **AzureClientID** - Enter Azure Client ID. \n\n\t e. **AzureClientSecret** - Enter Azure Client Secret. \n\n\t f. **TenantID** - Enter Tenant ID got from above steps. \n\n\t g. **AzureEntraObjectId** - Enter Azure Object ID got from above steps. \n\n\t h. **LowestSeveritytoStore** - Lowest vulnerability severity to store. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t i. **ComplianceDataIngestion** - Select true if you want to enable Compliance data ingestion from Tenable VM. Default is false. \n\n\t j. **WASAssetDataIngestion** - Select true if you want to enable WAS Asset data ingestion from Tenable VM. Default is false. \n\n\t k. **WASVulnerabilityDataIngestion** - Select true if you want to enable WAS Vulnerability data ingestion from Tenable VM. Default is false. \n\n\t l. 
**LowestSeveritytoStoreWAS** - The Lowest Vulnerability severity to store for WAS. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t m. **TenableExportScheduleInMinutes** - Schedule in minutes to create new export job from Tenable VM. Default is 1440. \n\n\t n. **AssetTableName** - Enter name of the table used to store Asset Data logs. \n\n\t o. **VulnTableName** - Enter name of the table used to store Vulnerability Data logs. \n\n\t p. **ComplianceTableName** - Enter name of the table used to store Compliance Data logs. \n\n\t q. **WASAssetTableName** - Enter name of the table used to store WAS Asset Data logs. \n\n\t r. **WASVulnTableName** - Enter name of the table used to store WAS Vulnerability Data logs. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TenableVM Vulnerability Management Report data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TenableVMAzureSentinelConnector310Updated-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. 
Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. TenableVMXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\n\t a. **WorkspaceName** - Enter the Workspace Name of the log analytics Workspace. \n\n\t b. **TenableAccessKey** - Enter Access key for using the Tenable API. \n\n\t c. **TenableSecretKey** - Enter Tenable Secret Key for Authentication. \n\n\t d. **AzureClientID** - Enter Azure Client ID. \n\n\t e. **AzureClientSecret** - Enter Azure Client Secret. \n\n\t f. 
**TenantID** - Enter Tenant ID got from above steps. \n\n\t g. **AzureEntraObjectId** - Enter Azure Object ID got from above steps. \n\n\t h. **LowestSeveritytoStore** - Lowest vulnerability severity to store. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t i. **ComplianceDataIngestion** - Select true if you want to enable Compliance data ingestion from Tenable VM. Default is false. \n\n\t j. **WASAssetDataIngestion** - Select true if you want to enable WAS Asset data ingestion from Tenable VM. Default is false. \n\n\t k. **WASVulnerabilityDataIngestion** - Select true if you want to enable WAS Vulnerability data ingestion from Tenable VM. Default is false. \n\n\t l. **LowestSeveritytoStoreWAS** - The Lowest Vulnerability severity to store for WAS. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t m. **TenableExportScheduleInMinutes** - Schedule in minutes to create new export job from Tenable VM. Default is 1440. \n\n\t n. **AssetTableName** - Enter name of the table used to store Asset Data logs. \n\n\t o. **VulnTableName** - Enter name of the table used to store Vulnerability Data logs. \n\n\t p. **ComplianceTableName** - Enter name of the table used to store Compliance Data logs. \n\n\t q. **WASAssetTableName** - Enter name of the table used to store WAS Asset Data logs. \n\n\t r. **WASVulnTableName** - Enter name of the table used to store WAS Vulnerability Data logs. \n\n\t s. **PyTenableUAVendor** - Value must be set to **Microsoft**. \n\n\t t. **PyTenableUAProduct** - Value must be set to **Azure Sentinel**. \n\n\t u. **PyTenableUABuild** - Value must be set to **0.0.1**.\n3. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""Both a **TenableAccessKey** and a **TenableSecretKey** is required to access the Tenable REST API. [See the documentation to learn more about API](https://developer.tenable.com/reference#vulnerability-management). 
Check all [requirements and follow the instructions](https://docs.tenable.com/vulnerability-management/Content/Settings/my-account/GenerateAPIKey.htm) for obtaining credentials.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App/Data%20Connectors/TenableVM/TenableVM.json","true" +"Tenable_WAS_Vuln_CL","Tenable App","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App","tenable","tenable-sentinel-integration","2024-06-06","2025-06-19","","Tenable","Partner","https://www.tenable.com/support/technical-support","","domains","TenableVM","Tenable","Tenable Vulnerability Management","The TVM data connector provides the ability to ingest Asset, Vulnerability, Compliance, WAS assets and WAS vulnerabilities data into Microsoft Sentinel using TVM REST APIs. Refer to [API documentation](https://developer.tenable.com/reference) for more information. The connector provides the ability to get data which helps to examine potential security risks, get insight into your computing assets, diagnose configuration problems and more","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Durable Functions to connect to the TenableVM API to pull [assets](https://developer.tenable.com/reference#exports-assets-download-chunk), [vulnerabilities](https://developer.tenable.com/reference#exports-vulns-request-export) and [compliance](https://developer.tenable.com/reference#exports-compliance-request-export)(if selected) at a regular interval into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a [**TenableVM parser for vulnerabilities**](https://aka.ms/sentinel-TenableApp-TenableVMVulnerabilities-parser) and a [**TenableVM parser for assets**](https://aka.ms/sentinel-TenableApp-TenableVMAssets-parser) based on a Kusto Function to work as expected which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for TenableVM**\n\n [Follow the instructions](https://docs.tenable.com/vulnerability-management/Content/Settings/my-account/GenerateAPIKey.htm) to obtain the required API credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. The client ID and Tenant ID is required as configuration parameters for the execution of TenableVM Data Connector. 
\n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of TenableVM Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of TenableVM Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function App**\n\n""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TenableVM Vulnerability Management Report data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TenableVM-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-TenableVM-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group**, **FunctionApp Name** and **Location**. \n3. Enter the below information : \n\n\t a. **WorkspaceName** - Enter the Workspace Name of the log analytics Workspace. \n\n\t b. **TenableAccessKey** - Enter Access key for using the Tenable API. \n\n\t c. **TenableSecretKey** - Enter Tenable Secret Key for Authentication. \n\n\t d. **AzureClientID** - Enter Azure Client ID. \n\n\t e. **AzureClientSecret** - Enter Azure Client Secret. \n\n\t f. **TenantID** - Enter Tenant ID got from above steps. \n\n\t g. **AzureEntraObjectId** - Enter Azure Object ID got from above steps. \n\n\t h. **LowestSeveritytoStore** - Lowest vulnerability severity to store. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t i. **ComplianceDataIngestion** - Select true if you want to enable Compliance data ingestion from Tenable VM. Default is false. \n\n\t j. **WASAssetDataIngestion** - Select true if you want to enable WAS Asset data ingestion from Tenable VM. Default is false. \n\n\t k. **WASVulnerabilityDataIngestion** - Select true if you want to enable WAS Vulnerability data ingestion from Tenable VM. Default is false. \n\n\t l. 
**LowestSeveritytoStoreWAS** - The Lowest Vulnerability severity to store for WAS. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t m. **TenableExportScheduleInMinutes** - Schedule in minutes to create new export job from Tenable VM. Default is 1440. \n\n\t n. **AssetTableName** - Enter name of the table used to store Asset Data logs. \n\n\t o. **VulnTableName** - Enter name of the table used to store Vulnerability Data logs. \n\n\t p. **ComplianceTableName** - Enter name of the table used to store Compliance Data logs. \n\n\t q. **WASAssetTableName** - Enter name of the table used to store WAS Asset Data logs. \n\n\t r. **WASVulnTableName** - Enter name of the table used to store WAS Vulnerability Data logs. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TenableVM Vulnerability Management Report data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TenableVMAzureSentinelConnector310Updated-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. 
Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. TenableVMXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.12.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\n\t a. **WorkspaceName** - Enter the Workspace Name of the log analytics Workspace. \n\n\t b. **TenableAccessKey** - Enter Access key for using the Tenable API. \n\n\t c. **TenableSecretKey** - Enter Tenable Secret Key for Authentication. \n\n\t d. **AzureClientID** - Enter Azure Client ID. \n\n\t e. **AzureClientSecret** - Enter Azure Client Secret. \n\n\t f. 
**TenantID** - Enter Tenant ID got from above steps. \n\n\t g. **AzureEntraObjectId** - Enter Azure Object ID got from above steps. \n\n\t h. **LowestSeveritytoStore** - Lowest vulnerability severity to store. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t i. **ComplianceDataIngestion** - Select true if you want to enable Compliance data ingestion from Tenable VM. Default is false. \n\n\t j. **WASAssetDataIngestion** - Select true if you want to enable WAS Asset data ingestion from Tenable VM. Default is false. \n\n\t k. **WASVulnerabilityDataIngestion** - Select true if you want to enable WAS Vulnerability data ingestion from Tenable VM. Default is false. \n\n\t l. **LowestSeveritytoStoreWAS** - The Lowest Vulnerability severity to store for WAS. Allowed Values: Info, Low, Medium, High, Critical. Default is Info. \n\n\t m. **TenableExportScheduleInMinutes** - Schedule in minutes to create new export job from Tenable VM. Default is 1440. \n\n\t n. **AssetTableName** - Enter name of the table used to store Asset Data logs. \n\n\t o. **VulnTableName** - Enter name of the table used to store Vulnerability Data logs. \n\n\t p. **ComplianceTableName** - Enter name of the table used to store Compliance Data logs. \n\n\t q. **WASAssetTableName** - Enter name of the table used to store WAS Asset Data logs. \n\n\t r. **WASVulnTableName** - Enter name of the table used to store WAS Vulnerability Data logs. \n\n\t s. **PyTenableUAVendor** - Value must be set to **Microsoft**. \n\n\t t. **PyTenableUAProduct** - Value must be set to **Azure Sentinel**. \n\n\t u. **PyTenableUABuild** - Value must be set to **0.0.1**.\n3. 
Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""Both a **TenableAccessKey** and a **TenableSecretKey** is required to access the Tenable REST API. [See the documentation to learn more about API](https://developer.tenable.com/reference#vulnerability-management). 
Check all [requirements and follow the instructions](https://docs.tenable.com/vulnerability-management/Content/Settings/my-account/GenerateAPIKey.htm) for obtaining credentials.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tenable%20App/Data%20Connectors/TenableVM/TenableVM.json","true" +"Tenable_ad_CL","TenableAD","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TenableAD","","","","","","","","","","","Tenable.ad","Tenable","Tenable.ad","Tenable.ad connector allows you to export Tenable.ad Indicators of Exposures, trailflow and Indicators of Attacks logs to Microsoft Sentinel in real time.
It provides a data parser to manipulate the logs more easily. The different workbooks ease your Active Directory monitoring and provide different ways to visualize the data. The analytic templates allow to automate responses regarding different events, exposures, or attacks.","[{""title"": """", ""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://raw.githubusercontent.com/tenable/Azure-Sentinel/Tenable.ad-connector/Solutions/TenableAD/Parsers/afad_parser.kql) to create the Kusto Functions alias, **afad_parser**"", ""instructions"": []}, {""title"": ""1. Configure the Syslog server"", ""description"": ""You will first need a **linux Syslog** server that Tenable.ad will send logs to. Typically you can run **rsyslog** on **Ubuntu**.\n You can then configure this server as you wish, but it is recommended to be able to output Tenable.ad logs in a separate file.\n\nConfigure rsyslog to accept logs from your Tenable.ad IP address.:\n\n```shell\nsudo -i\n\n# Set Tenable.ad source IP address\nexport TENABLE_AD_IP={Enter your IP address}\n\n# Create rsyslog configuration file\ncat > /etc/rsyslog.d/80-tenable.conf << EOF\n\\$ModLoad imudp\n\\$UDPServerRun 514\n\\$ModLoad imtcp\n\\$InputTCPServerRun 514\n\\$AllowedSender TCP, 127.0.0.1, $TENABLE_AD_IP\n\\$AllowedSender UDP, 127.0.0.1, $TENABLE_AD_IP\n\\$template MsgTemplate,\""%TIMESTAMP:::date-rfc3339% %HOSTNAME% %programname%[%procid%]:%msg%\\n\""\n\\$template remote-incoming-logs, \""/var/log/%PROGRAMNAME%.log\""\n*.* ?remote-incoming-logs;MsgTemplate\nEOF\n\n# Restart rsyslog\nsystemctl restart rsyslog\n```""}, {""title"": ""2. 
Install and onboard the Microsoft agent for Linux"", ""description"": ""The OMS agent will receive the Tenable.ad syslog events and publish it in Sentinel :"", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""3. Check agent logs on the Syslog server"", ""description"": ""```shell\ntail -f /var/opt/microsoft/omsagent/log/omsagent.log\n```""}, {""title"": ""4. Configure Tenable.ad to send logs to your Syslog server"", ""description"": ""On your **Tenable.ad** portal, go to *System*, *Configuration* and then *Syslog*.\nFrom there you can create a new Syslog alert toward your Syslog server.\n\nOnce this is done, check that the logs are correctly gathered on your server in a separate file (to do this, you can use the *Test the configuration* button in the Syslog alert configuration in Tenable.ad).\nIf you used the Quickstart template, the Syslog server will by default listen on port 514 in UDP and 1514 in TCP, without TLS.""}, {""title"": ""5. Configure the custom logs"", ""description"": ""Configure the agent to collect the logs.\n\n1. In Sentinel, go to **Configuration** -> **Settings** -> **Workspace settings** -> **Custom logs**.\n2. Click **Add custom log**.\n3. Upload a sample Tenable.ad.log Syslog file from the **Linux** machine running the **Syslog** server and click **Next**\n4. 
Set the record delimiter to **New Line** if not already the case and click **Next**.\n5. Select **Linux** and enter the file path to the **Syslog** file, click **+** then **Next**. The default location of the file is `/var/log/Tenable.ad.log` if you have a Tenable version <3.1.0, you must also add this linux file location `/var/log/AlsidForAD.log`.\n6. Set the **Name** to *Tenable_ad_CL* (Azure automatically adds *_CL* at the end of the name, there must be only one, make sure the name is not *Tenable_ad_CL_CL*).\n7. Click **Next**, you will see a resume, then click **Create**\n"", ""instructions"": []}, {""title"": ""6. Enjoy !"", ""description"": ""> You should now be able to receive logs in the *Tenable_ad_CL* table, logs data can be parse using the **afad_parser()** function, used by all query samples, workbooks and analytic templates.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Access to Tenable.ad Configuration"", ""description"": ""Permissions to configure syslog alerting engine""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TenableAD/Data%20Connectors/Tenable.ad.json","true" +"Tenable_IO_Assets_CL","TenableIO","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TenableIO","tenable","tenable-sentinel-integration","2022-06-01","","","Tenable","Partner","https://www.tenable.com/support/technical-support","","domains","TenableIOAPI","Tenable","Tenable.io Vulnerability Management","The [Tenable.io](https://www.tenable.com/products/tenable-io) data connector provides the capability to ingest Asset and Vulnerability data into Microsoft Sentinel through the REST API from the Tenable.io platform (Managed in the cloud). Refer to [API documentation](https://developer.tenable.com/reference) for more information. The connector provides the ability to get data which helps to examine potential security risks, get insight into your computing assets, diagnose configuration problems and more","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Durable Functions to connect to the Tenable.io API to pull [assets](https://developer.tenable.com/reference#exports-assets-download-chunk) and [vulnerabilities](https://developer.tenable.com/reference#exports-vulns-request-export) at a regular interval into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a [**Tenable.io parser for vulnerabilities**](https://aka.ms/sentinel-TenableIO-TenableIOVulnerabilities-parser) and a [**Tenable.io parser for assets**](https://aka.ms/sentinel-TenableIO-TenableIOAssets-parser) based on a Kusto Function to work as expected which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for Tenable.io**\n\n [Follow the instructions](https://docs.tenable.com/tenableio/vulnerabilitymanagement/Content/Settings/GenerateAPIKey.htm) to obtain the required API credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function App**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Tenable.io Vulnerability Management Report data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TenableIO-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **TenableAccessKey** and **TenableSecretKey** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Tenable.io Vulnerability Management Report data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TenableIO-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. 
The name you type is validated to make sure that it's unique in Azure Functions. (e.g. TenableIOXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tTenableAccessKey\n\t\tTenableSecretKey\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""Both a **TenableAccessKey** and a **TenableSecretKey** is required to access the Tenable REST API. [See the documentation to learn more about API](https://developer.tenable.com/reference#vulnerability-management). Check all [requirements and follow the instructions](https://docs.tenable.com/tenableio/vulnerabilitymanagement/Content/Settings/GenerateAPIKey.htm) for obtaining credentials.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TenableIO/Data%20Connectors/TenableIO.json","true" +"Tenable_IO_Vuln_CL","TenableIO","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TenableIO","tenable","tenable-sentinel-integration","2022-06-01","","","Tenable","Partner","https://www.tenable.com/support/technical-support","","domains","TenableIOAPI","Tenable","Tenable.io Vulnerability Management","The [Tenable.io](https://www.tenable.com/products/tenable-io) data connector provides the capability to ingest Asset and Vulnerability data into Microsoft Sentinel through the REST API from the Tenable.io platform (Managed in the cloud). Refer to [API documentation](https://developer.tenable.com/reference) for more information. 
The connector provides the ability to get data which helps to examine potential security risks, get insight into your computing assets, diagnose configuration problems and more","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Durable Functions to connect to the Tenable.io API to pull [assets](https://developer.tenable.com/reference#exports-assets-download-chunk) and [vulnerabilities](https://developer.tenable.com/reference#exports-vulns-request-export) at a regular interval into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a [**Tenable.io parser for vulnerabilities**](https://aka.ms/sentinel-TenableIO-TenableIOVulnerabilities-parser) and a [**Tenable.io parser for assets**](https://aka.ms/sentinel-TenableIO-TenableIOAssets-parser) based on a Kusto Function to work as expected which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for Tenable.io**\n\n [Follow the instructions](https://docs.tenable.com/tenableio/vulnerabilitymanagement/Content/Settings/GenerateAPIKey.htm) to obtain the required API credentials. 
\n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function App**\n\n>**IMPORTANT:** Before deploying the Workspace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Tenable.io Vulnerability Management Report data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TenableIO-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **TenableAccessKey** and **TenableSecretKey** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Tenable.io Vulnerability Management Report data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TenableIO-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. 
Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. TenableIOXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tTenableAccessKey\n\t\tTenableSecretKey\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. 
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n3. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""Both a **TenableAccessKey** and a **TenableSecretKey** is required to access the Tenable REST API. [See the documentation to learn more about API](https://developer.tenable.com/reference#vulnerability-management). 
Check all [requirements and follow the instructions](https://docs.tenable.com/tenableio/vulnerabilitymanagement/Content/Settings/GenerateAPIKey.htm) for obtaining credentials.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TenableIO/Data%20Connectors/TenableIO.json","true" +"","TestSolution","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TestSolution","","","","","","","","","","domains","","","","","","","","false" +"TheHive_CL","TheHive","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TheHive","azuresentinel","azure-sentinel-solution-thehive","2021-10-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","TheHiveProjectTheHive","TheHive Project","TheHive Project - TheHive","The [TheHive](http://thehive-project.org/) data connector provides the capability to ingest common TheHive events into Microsoft Sentinel through Webhooks. TheHive can notify external system of modification events (case creation, alert update, task assignment) in real time. When a change occurs in the TheHive, an HTTPS POST request with event information is sent to a callback data connector URL. Refer to [Webhooks documentation](https://docs.thehive-project.org/thehive/legacy/thehive3/admin/webhooks/) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This data connector uses Azure Functions based on HTTP Trigger for waiting POST requests with logs to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. 
Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**TheHive**](https://aka.ms/sentinel-TheHive-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the TheHive**\n\n Follow the [instructions](https://docs.thehive-project.org/thehive/installation-and-configuration/configuration/webhooks/) to configure Webhooks.\n\n1. Authentication method is *Bearer Auth*.\n2. Generate the **TheHiveBearerToken** according to your password policy.\n3. Setup Webhook notifications in the *application.conf* file including **TheHiveBearerToken** parameter.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the TheHive data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the TheHive data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TheHive-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. 
\n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **TheHiveBearerToken** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n6. After deploying open Function App page, select your app, go to the **Functions** and click **Get Function Url** copy it and follow p.7 from STEP 1.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the TheHive data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-TheHive-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. Go to Azure Portal for the Function App configuration.\n2. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tTheHiveBearerToken\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Webhooks Credentials/permissions"", ""description"": ""**TheHiveBearerToken**, **Callback URL** are required for working Webhooks. 
See the documentation to learn more about [configuring Webhooks](https://docs.thehive-project.org/thehive/installation-and-configuration/configuration/webhooks/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TheHive/Data%20Connectors/TheHive_Webhooks_FunctionApp.json","true" +"TheomAlerts_CL","Theom","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Theom","theominc1667512729960","theom_sentinel","2022-11-04","","","Theom","Partner","https://www.theom.ai","","domains","Theom","Theom","Theom","Theom Data Connector enables organizations to connect their Theom environment to Microsoft Sentinel. This solution enables users to receive alerts on data security risks, create and enrich incidents, check statistics and trigger SOAR playbooks in Microsoft Sentinel","[{""title"": """", ""description"": ""1. In **Theom UI Console** click on **Manage -> Alerts** on the side bar.\n2. Select **Sentinel** tab.\n3. Click on **Active** button to enable the configuration.\n4. Enter `Primary` key as `Authorization Token`\n5. Enter `Endpoint URL` as `https://.ods.opinsights.azure.com/api/logs?api-version=2016-04-01`\n6. Click on `SAVE SETTINGS`\n"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Theom/Data%20Connectors/Theom.json","true" +"ThreatIntelligenceIndicator","Threat Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence","azuresentinel","azure-sentinel-solution-threatintelligence-taxii","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","MicrosoftDefenderThreatIntelligence","Microsoft","Microsoft Defender Threat Intelligence","Microsoft Sentinel provides you the capability to import threat intelligence generated by Microsoft to enable monitoring, alerting and hunting. Use this data connector to import Indicators of Compromise (IOCs) from Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel. 
Threat indicators can include IP addresses, domains, URLs, and file hashes, etc.","[{""title"": ""Use this data connector to import Indicators of Compromise (IOCs) from Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel.""}, {""instructions"": [{""type"": ""MicrosoftThreatIntelligence"", ""parameters"": {""connectorKind"": ""MicrosoftThreatIntelligence""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence/Data%20Connectors/template_MicrosoftDefenderThreatIntelligence.json","true" +"ThreatIntelligenceIndicator","Threat Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence","azuresentinel","azure-sentinel-solution-threatintelligence-taxii","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","PremiumMicrosoftDefenderForThreatIntelligence","Microsoft","Premium Microsoft Defender Threat Intelligence","Microsoft Sentinel provides you the capability to import threat intelligence generated by Microsoft to enable monitoring, alerting and hunting. Use this data connector to import Indicators of Compromise (IOCs) from Premium Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel. Threat indicators can include IP addresses, domains, URLs, and file hashes, etc. Note: This is a paid connector. 
To use and ingest data from it, please purchase the ""MDTI API Access"" SKU from the Partner Center.","[{""title"": ""Use this data connector to import Indicators of Compromise (IOCs) from Premium Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel.""}, {""instructions"": [{""type"": ""PremiumMicrosoftDefenderForThreatIntelligence"", ""parameters"": {""connectorKind"": ""PremiumMicrosoftDefenderForThreatIntelligence""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence/Data%20Connectors/template_PremiumMicrosoftDefenderThreatIntelligence.json","true" +"CommonSecurityLog","Threat Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence","azuresentinel","azure-sentinel-solution-threatintelligence-taxii","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligence","Microsoft","Threat Intelligence Platforms","Microsoft Sentinel integrates with Microsoft Graph Security API data sources to enable monitoring, alerting, and hunting using your threat intelligence. Use this connector to send threat indicators to Microsoft Sentinel from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MindMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, and file hashes. 
For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2223729&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""You can connect your threat intelligence data sources to Microsoft Sentinel by either:"", ""description"": ""- Using an integrated Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MindMeld, MISP, and others.\n\n- Calling the Microsoft Graph Security API directly from another application.""}, {""title"": ""Follow These Steps to Connect your Threat Intelligence:"", ""description"": ""1) [Register an application](https://docs.microsoft.com/graph/auth-v2-service#1-register-your-app) in Azure Active Directory.\n\n2) [Configure permissions](https://docs.microsoft.com/graph/auth-v2-service#2-configure-permissions-for-microsoft-graph) and be sure to add the ThreatIndicators.ReadWrite.OwnedBy permission to the application.\n\n3) Ask your Azure AD tenant administrator to [grant consent](https://docs.microsoft.com/graph/auth-v2-service#3-get-administrator-consent) to the application.\n\n4) Configure your TIP or other integrated application to push indicators to Microsoft Sentinel by specifying the following:\n\n a. The application ID and secret you received when registering the app (step 1 above). \n\n b. Set \u201cMicrosoft Sentinel\u201d as the target.\n\n c. 
Set an action for each indicator - \u2018alert\u2019 is most relevant for Microsoft Sentinel use cases \n\nFor the latest list of integrated Threat Intelligence Platforms and detailed configuration instructions, see the [full documentation](https://docs.microsoft.com/azure/sentinel/import-threat-intelligence#adding-threat-indicators-to-azure-sentinel-with-the-threat-intelligence-platforms-data-connector).\n\nClick on \""Connect\"" below\n\n> Data from all regions will be sent to and stored in the workspace's region."", ""instructions"": [{""parameters"": {}, ""type"": ""ThreatIntelligence""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""tenant"": [""GlobalAdmin"", ""SecurityAdmin""]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence/Data%20Connectors/template_ThreatIntelligence.json","true" +"ThreatIntelligenceIndicator","Threat Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence","azuresentinel","azure-sentinel-solution-threatintelligence-taxii","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligence","Microsoft","Threat Intelligence Platforms","Microsoft Sentinel integrates with Microsoft Graph Security API data sources to enable monitoring, alerting, and hunting using your threat intelligence. Use this connector to send threat indicators to Microsoft Sentinel from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MindMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, and file hashes. 
For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2223729&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""You can connect your threat intelligence data sources to Microsoft Sentinel by either:"", ""description"": ""- Using an integrated Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MindMeld, MISP, and others.\n\n- Calling the Microsoft Graph Security API directly from another application.""}, {""title"": ""Follow These Steps to Connect your Threat Intelligence:"", ""description"": ""1) [Register an application](https://docs.microsoft.com/graph/auth-v2-service#1-register-your-app) in Azure Active Directory.\n\n2) [Configure permissions](https://docs.microsoft.com/graph/auth-v2-service#2-configure-permissions-for-microsoft-graph) and be sure to add the ThreatIndicators.ReadWrite.OwnedBy permission to the application.\n\n3) Ask your Azure AD tenant administrator to [grant consent](https://docs.microsoft.com/graph/auth-v2-service#3-get-administrator-consent) to the application.\n\n4) Configure your TIP or other integrated application to push indicators to Microsoft Sentinel by specifying the following:\n\n a. The application ID and secret you received when registering the app (step 1 above). \n\n b. Set \u201cMicrosoft Sentinel\u201d as the target.\n\n c. 
Set an action for each indicator - \u2018alert\u2019 is most relevant for Microsoft Sentinel use cases \n\nFor the latest list of integrated Threat Intelligence Platforms and detailed configuration instructions, see the [full documentation](https://docs.microsoft.com/azure/sentinel/import-threat-intelligence#adding-threat-indicators-to-azure-sentinel-with-the-threat-intelligence-platforms-data-connector).\n\nClick on \""Connect\"" below\n\n> Data from all regions will be sent to and stored in the workspace's region."", ""instructions"": [{""parameters"": {}, ""type"": ""ThreatIntelligence""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""tenant"": [""GlobalAdmin"", ""SecurityAdmin""]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence/Data%20Connectors/template_ThreatIntelligence.json","true" +"ThreatIntelligenceIndicator","Threat Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence","azuresentinel","azure-sentinel-solution-threatintelligence-taxii","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligenceTaxii","Microsoft","Threat intelligence - TAXII","Microsoft Sentinel integrates with TAXII 2.0 and 2.1 data sources to enable monitoring, alerting, and hunting using your threat intelligence. Use this connector to send the supported STIX object types from TAXII servers to Microsoft Sentinel. Threat indicators can include IP addresses, domains, URLs, and file hashes. 
For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2224105&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Configure TAXII servers to stream STIX 2.0 or 2.1 STIX objects to Microsoft Sentinel"", ""description"": ""You can connect your TAXII servers to Microsoft Sentinel using the built-in TAXII connector. For detailed configuration instructions, see the [full documentation](https://docs.microsoft.com/azure/sentinel/import-threat-intelligence#adding-threat-indicators-to-azure-sentinel-with-the-threat-intelligence---taxii-data-connector). \n\nEnter the following information and select Add to configure your TAXII server."", ""instructions"": [{""parameters"": {}, ""type"": ""ThreatIntelligenceTaxii""}]}]","{""customs"": [{""name"": ""TAXII Server"", ""description"": ""TAXII 2.0 or TAXII 2.1 Server URI and Collection ID.""}], ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence/Data%20Connectors/template_ThreatIntelligenceTaxii.json","true" +"ThreatIntelligenceIndicator","Threat Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence","azuresentinel","azure-sentinel-solution-threatintelligence-taxii","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligenceUploadIndicatorsAPI","Microsoft","Threat Intelligence Upload API (Preview)","Microsoft Sentinel offers a data plane API to bring in threat intelligence from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, or other integrated applications. 
Threat indicators can include IP addresses, domains, URLs, file hashes and email addresses. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2269830&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""You can connect your threat intelligence data sources to Microsoft Sentinel by either: "", ""description"": ""\n>Using an integrated Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, and others. \n\n>Calling the Microsoft Sentinel data plane API directly from another application. \n - Note: The 'Status' of the connector will not appear as 'Connected' here, because the data is ingested by making an API call.""}, {""title"": ""Follow These Steps to Connect to your Threat Intelligence: "", ""description"": """"}, {""title"": ""1. Get Microsoft Entra ID Access Token"", ""description"": ""To send request to the APIs, you need to acquire Microsoft Entra ID access token. You can follow instruction in this page: https://docs.microsoft.com/azure/databricks/dev-tools/api/latest/aad/app-aad-token#get-an-azure-ad-access-token \n - Notice: Please request Microsoft Entra ID access token with scope value: \nFairfax: https://management.usgovcloudapi.net/.default \nMooncake: https://management.chinacloudapi.cn/.default ""}, {""title"": ""2. Send STIX objects to Sentinel"", ""description"": ""You can send the supported STIX object types by calling our Upload API. For more information about the API, click [here](https://learn.microsoft.com/azure/sentinel/stix-objects-api). \n\n>HTTP method: POST \n\n>Endpoint: \nFairfax: https://api.ti.sentinel.azure.us/workspaces/[WorkspaceID]/threatintelligence-stix-objects:upload?api-version=2024-02-01-preview \nMooncake: https://api.ti.sentinel.azure.cn/workspaces/[WorkspaceID]/threatintelligence-stix-objects:upload?api-version=2024-02-01-preview \n\n>WorkspaceID: the workspace that the STIX objects are uploaded to. 
\n\n\n>Header Value 1: \""Authorization\"" = \""Bearer [Microsoft Entra ID Access Token from step 1]\"" \n\n\n> Header Value 2: \""Content-Type\"" = \""application/json\"" \n \n>Body: The body is a JSON object containing an array of STIX objects.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.SecurityInsights/threatintelligence/write"", ""permissionsDisplayText"": ""write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence/Data%20Connectors/template_ThreatIntelligenceUploadIndicators_ForGov.json","true" +"ThreatIntelligenceIndicator","Threat Intelligence","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence","azuresentinel","azure-sentinel-solution-threatintelligence-taxii","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligenceUploadIndicatorsAPI","Microsoft","Threat Intelligence Upload API (Preview)","Microsoft Sentinel offers a data plane API to bring in threat intelligence from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, file hashes and email addresses. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2269830&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""You can connect your threat intelligence data sources to Microsoft Sentinel by either: "", ""description"": ""\n>Using an integrated Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, and others. \n\n>Calling the Microsoft Sentinel data plane API directly from another application. 
\n - Note: The 'Status' of the connector will not appear as 'Connected' here, because the data is ingested by making an API call.""}, {""title"": ""Follow These Steps to Connect to your Threat Intelligence: "", ""description"": """"}, {""title"": ""1. Get Microsoft Entra ID Access Token"", ""description"": ""To send request to the APIs, you need to acquire Microsoft Entra ID access token. You can follow instruction in this page: https://docs.microsoft.com/azure/databricks/dev-tools/api/latest/aad/app-aad-token#get-an-azure-ad-access-token \n - Notice: Please request Microsoft Entra ID access token with scope value: https://management.azure.com/.default ""}, {""title"": ""2. Send STIX objects to Sentinel"", ""description"": ""You can send the supported STIX object types by calling our Upload API. For more information about the API, click [here](https://learn.microsoft.com/azure/sentinel/stix-objects-api). \n\n>HTTP method: POST \n\n>Endpoint: https://api.ti.sentinel.azure.com/workspaces/[WorkspaceID]/threatintelligence-stix-objects:upload?api-version=2024-02-01-preview \n\n>WorkspaceID: the workspace that the STIX objects are uploaded to. 
\n\n\n>Header Value 1: \""Authorization\"" = \""Bearer [Microsoft Entra ID Access Token from step 1]\"" \n\n\n> Header Value 2: \""Content-Type\"" = \""application/json\"" \n \n>Body: The body is a JSON object containing an array of STIX objects.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.SecurityInsights/threatintelligence/write"", ""permissionsDisplayText"": ""write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence/Data%20Connectors/template_ThreatIntelligenceUploadIndicators.json","true" +"ThreatIntelIndicators","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","MicrosoftDefenderThreatIntelligence","Microsoft","Microsoft Defender Threat Intelligence","Microsoft Sentinel provides you the capability to import threat intelligence generated by Microsoft to enable monitoring, alerting and hunting. Use this data connector to import Indicators of Compromise (IOCs) from Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel. 
Threat indicators can include IP addresses, domains, URLs, and file hashes, etc.","[{""title"": ""Use this data connector to import Indicators of Compromise (IOCs) from Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel.""}, {""instructions"": [{""type"": ""MicrosoftThreatIntelligence"", ""parameters"": {""connectorKind"": ""MicrosoftThreatIntelligence""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_MicrosoftDefenderThreatIntelligence.json","true" +"ThreatIntelObjects","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","MicrosoftDefenderThreatIntelligence","Microsoft","Microsoft Defender Threat Intelligence","Microsoft Sentinel provides you the capability to import threat intelligence generated by Microsoft to enable monitoring, alerting and hunting. Use this data connector to import Indicators of Compromise (IOCs) from Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel. 
Threat indicators can include IP addresses, domains, URLs, and file hashes, etc.","[{""title"": ""Use this data connector to import Indicators of Compromise (IOCs) from Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel.""}, {""instructions"": [{""type"": ""MicrosoftThreatIntelligence"", ""parameters"": {""connectorKind"": ""MicrosoftThreatIntelligence""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_MicrosoftDefenderThreatIntelligence.json","true" +"ThreatIntelIndicators","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","PremiumMicrosoftDefenderForThreatIntelligence","Microsoft","Premium Microsoft Defender Threat Intelligence","Microsoft Sentinel provides you the capability to import threat intelligence generated by Microsoft to enable monitoring, alerting and hunting. Use this data connector to import Indicators of Compromise (IOCs) from Premium Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel. Threat indicators can include IP addresses, domains, URLs, and file hashes, etc. Note: This is a paid connector. 
To use and ingest data from it, please purchase the ""MDTI API Access"" SKU from the Partner Center.","[{""title"": ""Use this data connector to import Indicators of Compromise (IOCs) from Premium Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel.""}, {""instructions"": [{""type"": ""PremiumMicrosoftDefenderForThreatIntelligence"", ""parameters"": {""connectorKind"": ""PremiumMicrosoftDefenderForThreatIntelligence""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_PremiumMicrosoftDefenderThreatIntelligence.json","true" +"ThreatIntelObjects","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","PremiumMicrosoftDefenderForThreatIntelligence","Microsoft","Premium Microsoft Defender Threat Intelligence","Microsoft Sentinel provides you the capability to import threat intelligence generated by Microsoft to enable monitoring, alerting and hunting. Use this data connector to import Indicators of Compromise (IOCs) from Premium Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel. Threat indicators can include IP addresses, domains, URLs, and file hashes, etc. Note: This is a paid connector. 
To use and ingest data from it, please purchase the ""MDTI API Access"" SKU from the Partner Center.","[{""title"": ""Use this data connector to import Indicators of Compromise (IOCs) from Premium Microsoft Defender Threat Intelligence (MDTI) into Microsoft Sentinel.""}, {""instructions"": [{""type"": ""PremiumMicrosoftDefenderForThreatIntelligence"", ""parameters"": {""connectorKind"": ""PremiumMicrosoftDefenderForThreatIntelligence""}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_PremiumMicrosoftDefenderThreatIntelligence.json","true" +"CommonSecurityLog","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligence","Microsoft","Threat Intelligence Platforms","Microsoft Sentinel integrates with Microsoft Graph Security API data sources to enable monitoring, alerting, and hunting using your threat intelligence. Use this connector to send threat indicators to Microsoft Sentinel from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MindMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, and file hashes. 
For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2223729&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""You can connect your threat intelligence data sources to Microsoft Sentinel by either:"", ""description"": ""- Using an integrated Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MindMeld, MISP, and others.\n\n- Calling the Microsoft Graph Security API directly from another application.""}, {""title"": ""Follow These Steps to Connect your Threat Intelligence:"", ""description"": ""1) [Register an application](https://docs.microsoft.com/graph/auth-v2-service#1-register-your-app) in Azure Active Directory.\n\n2) [Configure permissions](https://docs.microsoft.com/graph/auth-v2-service#2-configure-permissions-for-microsoft-graph) and be sure to add the ThreatIndicators.ReadWrite.OwnedBy permission to the application.\n\n3) Ask your Azure AD tenant administrator to [grant consent](https://docs.microsoft.com/graph/auth-v2-service#3-get-administrator-consent) to the application.\n\n4) Configure your TIP or other integrated application to push indicators to Microsoft Sentinel by specifying the following:\n\n a. The application ID and secret you received when registering the app (step 1 above). \n\n b. Set \u201cMicrosoft Sentinel\u201d as the target.\n\n c. 
Set an action for each indicator - \u2018alert\u2019 is most relevant for Microsoft Sentinel use cases \n\nFor the latest list of integrated Threat Intelligence Platforms and detailed configuration instructions, see the [full documentation](https://docs.microsoft.com/azure/sentinel/import-threat-intelligence#adding-threat-indicators-to-azure-sentinel-with-the-threat-intelligence-platforms-data-connector).\n\nClick on \""Connect\"" below\n\n> Data from all regions will be sent to and stored in the workspace's region."", ""instructions"": [{""parameters"": {}, ""type"": ""ThreatIntelligence""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""tenant"": [""GlobalAdmin"", ""SecurityAdmin""]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligence.json","true" +"ThreatIntelIndicators","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligence","Microsoft","Threat Intelligence Platforms","Microsoft Sentinel integrates with Microsoft Graph Security API data sources to enable monitoring, alerting, and hunting using your threat intelligence. Use this connector to send threat indicators to Microsoft Sentinel from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MindMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, and file hashes. 
For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2223729&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""You can connect your threat intelligence data sources to Microsoft Sentinel by either:"", ""description"": ""- Using an integrated Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MindMeld, MISP, and others.\n\n- Calling the Microsoft Graph Security API directly from another application.""}, {""title"": ""Follow These Steps to Connect your Threat Intelligence:"", ""description"": ""1) [Register an application](https://docs.microsoft.com/graph/auth-v2-service#1-register-your-app) in Azure Active Directory.\n\n2) [Configure permissions](https://docs.microsoft.com/graph/auth-v2-service#2-configure-permissions-for-microsoft-graph) and be sure to add the ThreatIndicators.ReadWrite.OwnedBy permission to the application.\n\n3) Ask your Azure AD tenant administrator to [grant consent](https://docs.microsoft.com/graph/auth-v2-service#3-get-administrator-consent) to the application.\n\n4) Configure your TIP or other integrated application to push indicators to Microsoft Sentinel by specifying the following:\n\n a. The application ID and secret you received when registering the app (step 1 above). \n\n b. Set \u201cMicrosoft Sentinel\u201d as the target.\n\n c. 
Set an action for each indicator - \u2018alert\u2019 is most relevant for Microsoft Sentinel use cases \n\nFor the latest list of integrated Threat Intelligence Platforms and detailed configuration instructions, see the [full documentation](https://docs.microsoft.com/azure/sentinel/import-threat-intelligence#adding-threat-indicators-to-azure-sentinel-with-the-threat-intelligence-platforms-data-connector).\n\nClick on \""Connect\"" below\n\n> Data from all regions will be sent to and stored in the workspace's region."", ""instructions"": [{""parameters"": {}, ""type"": ""ThreatIntelligence""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""tenant"": [""GlobalAdmin"", ""SecurityAdmin""]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligence.json","true" +"ThreatIntelObjects","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligence","Microsoft","Threat Intelligence Platforms","Microsoft Sentinel integrates with Microsoft Graph Security API data sources to enable monitoring, alerting, and hunting using your threat intelligence. Use this connector to send threat indicators to Microsoft Sentinel from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MindMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, and file hashes. 
For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2223729&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""You can connect your threat intelligence data sources to Microsoft Sentinel by either:"", ""description"": ""- Using an integrated Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MindMeld, MISP, and others.\n\n- Calling the Microsoft Graph Security API directly from another application.""}, {""title"": ""Follow These Steps to Connect your Threat Intelligence:"", ""description"": ""1) [Register an application](https://docs.microsoft.com/graph/auth-v2-service#1-register-your-app) in Azure Active Directory.\n\n2) [Configure permissions](https://docs.microsoft.com/graph/auth-v2-service#2-configure-permissions-for-microsoft-graph) and be sure to add the ThreatIndicators.ReadWrite.OwnedBy permission to the application.\n\n3) Ask your Azure AD tenant administrator to [grant consent](https://docs.microsoft.com/graph/auth-v2-service#3-get-administrator-consent) to the application.\n\n4) Configure your TIP or other integrated application to push indicators to Microsoft Sentinel by specifying the following:\n\n a. The application ID and secret you received when registering the app (step 1 above). \n\n b. Set \u201cMicrosoft Sentinel\u201d as the target.\n\n c. 
Set an action for each indicator - \u2018alert\u2019 is most relevant for Microsoft Sentinel use cases \n\nFor the latest list of integrated Threat Intelligence Platforms and detailed configuration instructions, see the [full documentation](https://docs.microsoft.com/azure/sentinel/import-threat-intelligence#adding-threat-indicators-to-azure-sentinel-with-the-threat-intelligence-platforms-data-connector).\n\nClick on \""Connect\"" below\n\n> Data from all regions will be sent to and stored in the workspace's region."", ""instructions"": [{""parameters"": {}, ""type"": ""ThreatIntelligence""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}], ""tenant"": [""GlobalAdmin"", ""SecurityAdmin""]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligence.json","true" +"ThreatIntelIndicators","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligenceTaxii","Microsoft","Threat intelligence - TAXII","Microsoft Sentinel integrates with TAXII 2.0 and 2.1 data sources to enable monitoring, alerting, and hunting using your threat intelligence. Use this connector to send the supported STIX object types from TAXII servers to Microsoft Sentinel. Threat indicators can include IP addresses, domains, URLs, and file hashes. 
For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2224105&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Configure TAXII servers to stream STIX 2.0 or 2.1 STIX objects to Microsoft Sentinel"", ""description"": ""You can connect your TAXII servers to Microsoft Sentinel using the built-in TAXII connector. For detailed configuration instructions, see the [full documentation](https://docs.microsoft.com/azure/sentinel/import-threat-intelligence#adding-threat-indicators-to-azure-sentinel-with-the-threat-intelligence---taxii-data-connector). \n\nEnter the following information and select Add to configure your TAXII server."", ""instructions"": [{""parameters"": {}, ""type"": ""ThreatIntelligenceTaxii""}]}]","{""customs"": [{""name"": ""TAXII Server"", ""description"": ""TAXII 2.0 or TAXII 2.1 Server URI and Collection ID.""}], ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligenceTaxii.json","true" +"ThreatIntelObjects","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligenceTaxii","Microsoft","Threat intelligence - TAXII","Microsoft Sentinel integrates with TAXII 2.0 and 2.1 data sources to enable monitoring, alerting, and hunting using your threat intelligence. 
Use this connector to send the supported STIX object types from TAXII servers to Microsoft Sentinel. Threat indicators can include IP addresses, domains, URLs, and file hashes. For more information, see the [Microsoft Sentinel documentation >](https://go.microsoft.com/fwlink/p/?linkid=2224105&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""Configure TAXII servers to stream STIX 2.0 or 2.1 STIX objects to Microsoft Sentinel"", ""description"": ""You can connect your TAXII servers to Microsoft Sentinel using the built-in TAXII connector. For detailed configuration instructions, see the [full documentation](https://docs.microsoft.com/azure/sentinel/import-threat-intelligence#adding-threat-indicators-to-azure-sentinel-with-the-threat-intelligence---taxii-data-connector). \n\nEnter the following information and select Add to configure your TAXII server."", ""instructions"": [{""parameters"": {}, ""type"": ""ThreatIntelligenceTaxii""}]}]","{""customs"": [{""name"": ""TAXII Server"", ""description"": ""TAXII 2.0 or TAXII 2.1 Server URI and Collection ID.""}], ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligenceTaxii.json","true" +"ThreatIntelExportOperation","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligenceTaxiiExport","Microsoft","Threat intelligence - TAXII Export (Preview)","Microsoft Sentinel integrates 
with TAXII 2.1 servers to enable exporting of your threat intelligence objects. Use this connector to send the supported STIX object types from Microsoft Sentinel to TAXII servers.","[{""title"": ""Configure TAXII servers to export STIX 2.1 objects to. Once configured, you can start exporting STIX objects from your TI repository"", ""instructions"": [{""parameters"": {""isExport"": true}, ""type"": ""ThreatIntelligenceTaxii""}]}]","{""customs"": [{""name"": ""TAXII Server"", ""description"": ""TAXII 2.1 Server URL and Collection ID.""}], ""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligenceTaxiiExport.json","true" +"ThreatIntelIndicators","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligenceUploadIndicatorsAPI","Microsoft","Threat Intelligence Upload API (Preview)","Microsoft Sentinel offers a data plane API to bring in threat intelligence from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, file hashes and email addresses. 
For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2269830&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""You can connect your threat intelligence data sources to Microsoft Sentinel by either: "", ""description"": ""\n>Using an integrated Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, and others. \n\n>Calling the Microsoft Sentinel data plane API directly from another application. \n - Note: The 'Status' of the connector will not appear as 'Connected' here, because the data is ingested by making an API call.""}, {""title"": ""Follow These Steps to Connect to your Threat Intelligence: "", ""description"": """"}, {""title"": ""1. Get Microsoft Entra ID Access Token"", ""description"": ""To send request to the APIs, you need to acquire Microsoft Entra ID access token. You can follow instruction in this page: https://docs.microsoft.com/azure/databricks/dev-tools/api/latest/aad/app-aad-token#get-an-azure-ad-access-token \n - Notice: Please request Microsoft Entra ID access token with scope value: \nFairfax: https://management.usgovcloudapi.net/.default \nMooncake: https://management.chinacloudapi.cn/.default ""}, {""title"": ""2. Send STIX objects to Sentinel"", ""description"": ""You can send the supported STIX object types by calling our Upload API. For more information about the API, click [here](https://learn.microsoft.com/azure/sentinel/stix-objects-api). \n\n>HTTP method: POST \n\n>Endpoint: \nFairfax: https://api.ti.sentinel.azure.us/workspaces/[WorkspaceID]/threatintelligence-stix-objects:upload?api-version=2024-02-01-preview \nMooncake: https://api.ti.sentinel.azure.cn/workspaces/[WorkspaceID]/threatintelligence-stix-objects:upload?api-version=2024-02-01-preview \n\n>WorkspaceID: the workspace that the STIX objects are uploaded to. 
\n\n\n>Header Value 1: \""Authorization\"" = \""Bearer [Microsoft Entra ID Access Token from step 1]\"" \n\n\n> Header Value 2: \""Content-Type\"" = \""application/json\"" \n \n>Body: The body is a JSON object containing an array of STIX objects.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.SecurityInsights/threatintelligence/write"", ""permissionsDisplayText"": ""write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligenceUploadIndicators_ForGov.json","true" +"ThreatIntelObjects","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligenceUploadIndicatorsAPI","Microsoft","Threat Intelligence Upload API (Preview)","Microsoft Sentinel offers a data plane API to bring in threat intelligence from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, file hashes and email addresses. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2269830&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""You can connect your threat intelligence data sources to Microsoft Sentinel by either: "", ""description"": ""\n>Using an integrated Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, and others. \n\n>Calling the Microsoft Sentinel data plane API directly from another application. 
\n - Note: The 'Status' of the connector will not appear as 'Connected' here, because the data is ingested by making an API call.""}, {""title"": ""Follow These Steps to Connect to your Threat Intelligence: "", ""description"": """"}, {""title"": ""1. Get Microsoft Entra ID Access Token"", ""description"": ""To send request to the APIs, you need to acquire Microsoft Entra ID access token. You can follow instruction in this page: https://docs.microsoft.com/azure/databricks/dev-tools/api/latest/aad/app-aad-token#get-an-azure-ad-access-token \n - Notice: Please request Microsoft Entra ID access token with scope value: \nFairfax: https://management.usgovcloudapi.net/.default \nMooncake: https://management.chinacloudapi.cn/.default ""}, {""title"": ""2. Send STIX objects to Sentinel"", ""description"": ""You can send the supported STIX object types by calling our Upload API. For more information about the API, click [here](https://learn.microsoft.com/azure/sentinel/stix-objects-api). \n\n>HTTP method: POST \n\n>Endpoint: \nFairfax: https://api.ti.sentinel.azure.us/workspaces/[WorkspaceID]/threatintelligence-stix-objects:upload?api-version=2024-02-01-preview \nMooncake: https://api.ti.sentinel.azure.cn/workspaces/[WorkspaceID]/threatintelligence-stix-objects:upload?api-version=2024-02-01-preview \n\n>WorkspaceID: the workspace that the STIX objects are uploaded to. 
\n\n\n>Header Value 1: \""Authorization\"" = \""Bearer [Microsoft Entra ID Access Token from step 1]\"" \n\n\n> Header Value 2: \""Content-Type\"" = \""application/json\"" \n \n>Body: The body is a JSON object containing an array of STIX objects.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.SecurityInsights/threatintelligence/write"", ""permissionsDisplayText"": ""write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligenceUploadIndicators_ForGov.json","true" +"ThreatIntelIndicators","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligenceUploadIndicatorsAPI","Microsoft","Threat Intelligence Upload API (Preview)","Microsoft Sentinel offers a data plane API to bring in threat intelligence from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, file hashes and email addresses. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2269830&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""You can connect your threat intelligence data sources to Microsoft Sentinel by either: "", ""description"": ""\n>Using an integrated Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, and others. \n\n>Calling the Microsoft Sentinel data plane API directly from another application. 
\n - Note: The 'Status' of the connector will not appear as 'Connected' here, because the data is ingested by making an API call.""}, {""title"": ""Follow These Steps to Connect to your Threat Intelligence: "", ""description"": """"}, {""title"": ""1. Get Microsoft Entra ID Access Token"", ""description"": ""To send request to the APIs, you need to acquire Microsoft Entra ID access token. You can follow instruction in this page: https://docs.microsoft.com/azure/databricks/dev-tools/api/latest/aad/app-aad-token#get-an-azure-ad-access-token \n - Notice: Please request Microsoft Entra ID access token with scope value: https://management.azure.com/.default ""}, {""title"": ""2. Send STIX objects to Sentinel"", ""description"": ""You can send the supported STIX object types by calling our Upload API. For more information about the API, click [here](https://learn.microsoft.com/azure/sentinel/stix-objects-api). \n\n>HTTP method: POST \n\n>Endpoint: https://api.ti.sentinel.azure.com/workspaces/[WorkspaceID]/threatintelligence-stix-objects:upload?api-version=2024-02-01-preview \n\n>WorkspaceID: the workspace that the STIX objects are uploaded to. 
\n\n\n>Header Value 1: \""Authorization\"" = \""Bearer [Microsoft Entra ID Access Token from step 1]\"" \n\n\n> Header Value 2: \""Content-Type\"" = \""application/json\"" \n \n>Body: The body is a JSON object containing an array of STIX objects.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.SecurityInsights/threatintelligence/write"", ""permissionsDisplayText"": ""write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligenceUploadIndicators.json","true" +"ThreatIntelObjects","Threat Intelligence (NEW)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29","azuresentinel","azure-sentinel-solution-threatintelligence-updated","2025-04-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","ThreatIntelligenceUploadIndicatorsAPI","Microsoft","Threat Intelligence Upload API (Preview)","Microsoft Sentinel offers a data plane API to bring in threat intelligence from your Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, or other integrated applications. Threat indicators can include IP addresses, domains, URLs, file hashes and email addresses. For more information, see the [Microsoft Sentinel documentation](https://go.microsoft.com/fwlink/p/?linkid=2269830&wt.mc_id=sentinel_dataconnectordocs_content_cnl_csasci).","[{""title"": ""You can connect your threat intelligence data sources to Microsoft Sentinel by either: "", ""description"": ""\n>Using an integrated Threat Intelligence Platform (TIP), such as Threat Connect, Palo Alto Networks MineMeld, MISP, and others. \n\n>Calling the Microsoft Sentinel data plane API directly from another application. 
\n - Note: The 'Status' of the connector will not appear as 'Connected' here, because the data is ingested by making an API call.""}, {""title"": ""Follow These Steps to Connect to your Threat Intelligence: "", ""description"": """"}, {""title"": ""1. Get Microsoft Entra ID Access Token"", ""description"": ""To send request to the APIs, you need to acquire Microsoft Entra ID access token. You can follow instruction in this page: https://docs.microsoft.com/azure/databricks/dev-tools/api/latest/aad/app-aad-token#get-an-azure-ad-access-token \n - Notice: Please request Microsoft Entra ID access token with scope value: https://management.azure.com/.default ""}, {""title"": ""2. Send STIX objects to Sentinel"", ""description"": ""You can send the supported STIX object types by calling our Upload API. For more information about the API, click [here](https://learn.microsoft.com/azure/sentinel/stix-objects-api). \n\n>HTTP method: POST \n\n>Endpoint: https://api.ti.sentinel.azure.com/workspaces/[WorkspaceID]/threatintelligence-stix-objects:upload?api-version=2024-02-01-preview \n\n>WorkspaceID: the workspace that the STIX objects are uploaded to. 
\n\n\n>Header Value 1: \""Authorization\"" = \""Bearer [Microsoft Entra ID Access Token from step 1]\"" \n\n\n> Header Value 2: \""Content-Type\"" = \""application/json\"" \n \n>Body: The body is a JSON object containing an array of STIX objects.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.SecurityInsights/threatintelligence/write"", ""permissionsDisplayText"": ""write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20%28NEW%29/Data%20Connectors/template_ThreatIntelligenceUploadIndicators.json","true" +"","Threat Intelligence Solution for Azure Government","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Threat%20Intelligence%20Solution%20for%20Azure%20Government","azuresentinel","azure-sentinel-solution-threatintelligenceazuregov","2023-03-06","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"","ThreatAnalysis&Response","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ThreatAnalysis%26Response","azuresentinel","azure-sentinel-solution-mitreattck","2021-10-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","ThreatConnect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ThreatConnect","threatconnectinc1694630408738","sentinel-threatconnect-byol-enterprise","2023-09-11","2023-09-11","","ThreatConnect, Inc.","Partner","https://threatconnect.com/contact/","","domains","","","","","","","","false" +"","ThreatXCloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ThreatXCloud","azuresentinel","azure-sentinel-solution-threatxwaf","2022-09-23","2022-09-23","","Microsoft 
Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"Tomcat_CL","Tomcat","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tomcat","azuresentinel","azure-sentinel-solution-apachetomcat","2022-01-31","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ApacheTomcat","Apache","[Deprecated] Apache Tomcat","The Apache Tomcat solution provides the capability to ingest [Apache Tomcat](http://tomcat.apache.org/) events into Microsoft Sentinel. Refer to [Apache Tomcat documentation](http://tomcat.apache.org/tomcat-10.0-doc/logging.html) for more information.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias TomcatEvent and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tomcat/Parsers/TomcatEvent.txt).The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using Apache Tomcat version 10.0.4"", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Apache Tomcat Server where the logs are generated.\n\n> Logs from Apache Tomcat Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the custom log directory to be collected"", ""instructions"": [{""parameters"": {""linkType"": ""OpenCustomLogsSettings""}, ""type"": ""InstallAgent""}]}, {""title"": """", ""description"": ""1. 
Select the link above to open your workspace advanced settings \n2. From the left pane, select **Data**, select **Custom Logs** and click **Add+**\n3. Click **Browse** to upload a sample of a Tomcat log file (e.g. access.log or error.log). Then, click **Next >**\n4. Select **New line** as the record delimiter and click **Next >**\n5. Select **Windows** or **Linux** and enter the path to Tomcat logs based on your configuration. Example: \n - **Linux** Directory: '/var/log/tomcat/*.log' \n6. After entering the path, click the '+' symbol to apply, then click **Next >** \n7. Add **Tomcat_CL** as the custom log Name and click **Done**""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Tomcat/Data%20Connectors/Connector_Tomcat_agent.json","true" +"","Torq","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Torq","torqtechnologiesltd2020","torq_sentinel_solution","2024-12-24","","","Torq Support Team","Partner","https://support.torq.io","","domains","","","","","","","","false" +"","Training","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Training","","","","","","","","","","","","","","","","","","false" +"","TransmitSecurity","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/TransmitSecurity","transmitsecurity","microsoft-sentinel-solution-transmitsecurity","2024-06-10","2024-11-20","","Transmit Security","Partner","https://transmitsecurity.com/support","","domains","","","","","","","","false" +"CommonSecurityLog","Trend Micro Apex One","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Apex%20One","azuresentinel","azure-sentinel-solution-trendmicroapexone","2021-07-06","2022-03-24","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","TrendMicroApexOne","Trend Micro","[Deprecated] Trend Micro Apex One via Legacy Agent","The [Trend Micro Apex One](https://www.trendmicro.com/en_us/business/products/user-protection/sps/endpoint.html) data connector provides the capability to ingest [Trend Micro Apex One events](https://aka.ms/sentinel-TrendMicroApex-OneEvents) into Microsoft Sentinel. 
Refer to [Trend Micro Apex Central](https://aka.ms/sentinel-TrendMicroApex-OneCentral) for more information.","[{""title"": """", ""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected [**TMApexOneEvent**](https://aka.ms/sentinel-TMApexOneEvent-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using Trend Micro Apex Central 2019"", ""instructions"": []}, {""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. 
You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""[Follow these steps](https://docs.trendmicro.com/en-us/enterprise/trend-micro-apex-central-2019-online-help/detections/logs_001/syslog-forwarding.aspx) to configure Apex Central sending alerts via syslog. While configuring, on step 6, select the log format **CEF**.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Apex%20One/Data%20Connectors/TrendMicro_ApexOne.json","true" +"CommonSecurityLog","Trend Micro Apex One","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Apex%20One","azuresentinel","azure-sentinel-solution-trendmicroapexone","2021-07-06","2022-03-24","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","TrendMicroApexOneAma","Trend Micro","[Deprecated] Trend Micro Apex One via AMA","The [Trend Micro Apex One](https://www.trendmicro.com/en_us/business/products/user-protection/sps/endpoint.html) data connector provides the capability to ingest [Trend Micro Apex One events](https://aka.ms/sentinel-TrendMicroApex-OneEvents) into Microsoft Sentinel. 
Refer to [Trend Micro Apex Central](https://aka.ms/sentinel-TrendMicroApex-OneCentral) for more information.","[{""title"": """", ""description"": "">This data connector depends on a parser based on a Kusto Function to work as expected [**TMApexOneEvent**](https://aka.ms/sentinel-TMApexOneEvent-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine"", ""instructions"": []}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""[Follow these steps](https://docs.trendmicro.com/en-us/enterprise/trend-micro-apex-central-2019-online-help/detections/logs_001/syslog-forwarding.aspx) to configure Apex Central sending alerts via syslog. While configuring, on step 6, select the log format **CEF**."", ""instructions"": []}, {""title"": ""Step C. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Apex%20One/Data%20Connectors/template_TrendMicro_ApexOneAMA.json","true" +"TrendMicroCAS_CL","Trend Micro Cloud App Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Cloud%20App%20Security","azuresentinel","azuresentinel.trendmicrocas","2021-09-28","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","TrendMicroCAS","Trend Micro","Trend Micro Cloud App Security","The [Trend Micro Cloud App Security](https://www.trendmicro.com/en_be/business/products/user-protection/sps/email-and-collaboration/cloud-app-security.html) data connector provides the capability to retrieve security event logs of the services that Cloud App Security protects and more events into Microsoft Sentinel through the Log Retrieval API. Refer to API [documentation](https://docs.trendmicro.com/en-us/enterprise/cloud-app-security-integration-api-online-help/supported-cloud-app-/log-retrieval-api/get-security-logs.aspx) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Azure Blob Storage API to pull logs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**TrendMicroCAS**](https://aka.ms/sentinel-TrendMicroCAS-parser) which is deployed with the Microsoft Sentinel Solution.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Trend Micro Log Retrieval API**\n\n Follow the instructions to obtain the credentials.\n\n1. Obtain the **TrendMicroCASToken** using the [documentation](https://docs.trendmicro.com/en-us/enterprise/cloud-app-security-integration-api-online-help/getting-started-with/generating-an-authen.aspx).\n2. 
Save credentials for using in the data connector.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Trend Micro Cloud App Security data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Trend Micro Cloud App Security data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-TrendMicroCAS-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **TrendMicroCASToken**, **TrendMicroCASServiceURL** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Trend Micro Cloud App Security data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. 
Download the [Azure Function App](https://aka.ms/sentinel-TMCASAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. TMCASXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. 
Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tTrendMicroCASToken\n\t\tTrendMicroCASServiceURL\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**TrendMicroCASToken** and **TrendMicroCASServiceURL** are required for making API calls. 
See the [documentation](https://docs.trendmicro.com/en-us/enterprise/cloud-app-security-integration-api-online-help/getting-started-with/using-cloud-app-secu.aspx) to learn more about API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Cloud%20App%20Security/Data%20Connectors/TerndMicroCAS_API_FunctionApp.json","true" +"CommonSecurityLog","Trend Micro Deep Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Deep%20Security","trendmicro","trend_micro_deep_security_mss","2022-05-10","","","Trend Micro","Partner","https://success.trendmicro.com/dcx/s/?language=en_US","","domains","TrendMicro","Trend Micro","[Deprecated] Trend Micro Deep Security via Legacy","The Trend Micro Deep Security connector allows you to easily connect your Deep Security logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's networks/systems and improves your security operation capabilities.","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. 
Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Trend Micro Deep Security logs to Syslog agent"", ""description"": ""1. Set your security solution to send Syslog messages in CEF format to the proxy machine. Make sure to send the logs to port 514 TCP on the machine's IP address.\n2. Forward Trend Micro Deep Security events to the Syslog agent.\n3. Define a new Syslog Configuration that uses the CEF format by referencing [this knowledge article](https://aka.ms/Sentinel-trendmicro-kblink) for additional information.\n4. Configure the Deep Security Manager to use this new configuration to forward events to the Syslog agent using [these instructions](https://aka.ms/Sentinel-trendMicro-connectorInstructions).\n5. Make sure to save the [TrendMicroDeepSecurity](https://aka.ms/TrendMicroDeepSecurityFunction) function so that it queries the Trend Micro Deep Security data properly.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Deep%20Security/Data%20Connectors/TrendMicroDeepSecurity.json","true" +"CommonSecurityLog","Trend Micro TippingPoint","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20TippingPoint","trendmicro","trend_micro_tippingpoint_mss","2022-05-02","","","Trend Micro","Partner","https://success.trendmicro.com/dcx/s/contactus?language=en_US","","domains","TrendMicroTippingPoint","Trend Micro","[Deprecated] Trend Micro TippingPoint via Legacy","The Trend Micro TippingPoint connector allows you to easily connect your TippingPoint SMS IPS events with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's networks/systems and improves your security operation capabilities.","[{""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias TrendMicroTippingPoint and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20TippingPoint/Parsers/TrendMicroTippingPoint).The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Trend Micro TippingPoint SMS logs to Syslog agent"", ""description"": ""Set your TippingPoint SMS to send Syslog messages in ArcSight CEF Format v4.2 format to the proxy machine. Make sure to send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20TippingPoint/Data%20Connectors/TrendMicroTippingPoint.json","true" +"TrendMicro_XDR_OAT_CL","Trend Micro Vision One","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Vision%20One","trendmicro","trend_micro_vision_one_xdr_mss","2022-05-11","2024-07-16","","Trend Micro","Partner","https://success.trendmicro.com/dcx/s/?language=en_US","","domains","TrendMicroXDR","Trend Micro","Trend Vision One","The [Trend Vision One](https://www.trendmicro.com/en_us/business/products/detection-response/xdr.html) connector allows you to easily connect your Workbench alert data with Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities. This gives you more insight into your organization's networks/systems and improves your security operation capabilities.

The Trend Vision One connector is supported in Microsoft Sentinel in the following regions: Australia East, Australia Southeast, Brazil South, Canada Central, Canada East, Central India, Central US, East Asia, East US, East US 2, France Central, Japan East, Korea Central, North Central US, North Europe, Norway East, South Africa North, South Central US, Southeast Asia, Sweden Central, Switzerland North, UAE North, UK South, UK West, West Europe, West US, West US 2, West US 3.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Trend Vision One API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Trend Vision One API**\n\n [Follow these instructions](https://docs.trendmicro.com/documentation/article/trend-vision-one-api-keys-third-party-apps) to create an account and an API authentication token.""}, {""title"": """", ""description"": ""**STEP 2 - Use the below deployment option to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Trend Vision One connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Trend Vision One API Authorization Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template Deployment"", ""description"": ""This method provides an automated deployment of the Trend Vision One connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-trendmicroxdr-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter a unique **Function Name**, **Workspace ID**, **Workspace Key**, **API Token** and **Region Code**. \n - Note: Provide the appropriate region code based on where your Trend Vision One instance is deployed: us, eu, au, in, sg, jp \n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Trend Vision One API Token"", ""description"": ""A Trend Vision One API Token is required. 
See the documentation to learn more about the [Trend Vision One API](https://docs.trendmicro.com/documentation/article/trend-vision-one-api-keys-third-party-apps).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Vision%20One/Data%20Connectors/TrendMicroXDR.json","true" +"TrendMicro_XDR_RCA_Result_CL","Trend Micro Vision One","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Vision%20One","trendmicro","trend_micro_vision_one_xdr_mss","2022-05-11","2024-07-16","","Trend Micro","Partner","https://success.trendmicro.com/dcx/s/?language=en_US","","domains","TrendMicroXDR","Trend Micro","Trend Vision One","The [Trend Vision One](https://www.trendmicro.com/en_us/business/products/detection-response/xdr.html) connector allows you to easily connect your Workbench alert data with Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities. This gives you more insight into your organization's networks/systems and improves your security operation capabilities.

The Trend Vision One connector is supported in Microsoft Sentinel in the following regions: Australia East, Australia Southeast, Brazil South, Canada Central, Canada East, Central India, Central US, East Asia, East US, East US 2, France Central, Japan East, Korea Central, North Central US, North Europe, Norway East, South Africa North, South Central US, Southeast Asia, Sweden Central, Switzerland North, UAE North, UK South, UK West, West Europe, West US, West US 2, West US 3.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Trend Vision One API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Trend Vision One API**\n\n [Follow these instructions](https://docs.trendmicro.com/documentation/article/trend-vision-one-api-keys-third-party-apps) to create an account and an API authentication token.""}, {""title"": """", ""description"": ""**STEP 2 - Use the below deployment option to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Trend Vision One connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Trend Vision One API Authorization Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template Deployment"", ""description"": ""This method provides an automated deployment of the Trend Vision One connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-trendmicroxdr-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter a unique **Function Name**, **Workspace ID**, **Workspace Key**, **API Token** and **Region Code**. \n - Note: Provide the appropriate region code based on where your Trend Vision One instance is deployed: us, eu, au, in, sg, jp \n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Trend Vision One API Token"", ""description"": ""A Trend Vision One API Token is required. 
See the documentation to learn more about the [Trend Vision One API](https://docs.trendmicro.com/documentation/article/trend-vision-one-api-keys-third-party-apps).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Vision%20One/Data%20Connectors/TrendMicroXDR.json","true" +"TrendMicro_XDR_RCA_Task_CL","Trend Micro Vision One","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Vision%20One","trendmicro","trend_micro_vision_one_xdr_mss","2022-05-11","2024-07-16","","Trend Micro","Partner","https://success.trendmicro.com/dcx/s/?language=en_US","","domains","TrendMicroXDR","Trend Micro","Trend Vision One","The [Trend Vision One](https://www.trendmicro.com/en_us/business/products/detection-response/xdr.html) connector allows you to easily connect your Workbench alert data with Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities. This gives you more insight into your organization's networks/systems and improves your security operation capabilities.

The Trend Vision One connector is supported in Microsoft Sentinel in the following regions: Australia East, Australia Southeast, Brazil South, Canada Central, Canada East, Central India, Central US, East Asia, East US, East US 2, France Central, Japan East, Korea Central, North Central US, North Europe, Norway East, South Africa North, South Central US, Southeast Asia, Sweden Central, Switzerland North, UAE North, UK South, UK West, West Europe, West US, West US 2, West US 3.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Trend Vision One API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Trend Vision One API**\n\n [Follow these instructions](https://docs.trendmicro.com/documentation/article/trend-vision-one-api-keys-third-party-apps) to create an account and an API authentication token.""}, {""title"": """", ""description"": ""**STEP 2 - Use the below deployment option to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Trend Vision One connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Trend Vision One API Authorization Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template Deployment"", ""description"": ""This method provides an automated deployment of the Trend Vision One connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-trendmicroxdr-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter a unique **Function Name**, **Workspace ID**, **Workspace Key**, **API Token** and **Region Code**. \n - Note: Provide the appropriate region code based on where your Trend Vision One instance is deployed: us, eu, au, in, sg, jp \n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Trend Vision One API Token"", ""description"": ""A Trend Vision One API Token is required. 
See the documentation to learn more about the [Trend Vision One API](https://docs.trendmicro.com/documentation/article/trend-vision-one-api-keys-third-party-apps).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Vision%20One/Data%20Connectors/TrendMicroXDR.json","true" +"TrendMicro_XDR_WORKBENCH_CL","Trend Micro Vision One","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Vision%20One","trendmicro","trend_micro_vision_one_xdr_mss","2022-05-11","2024-07-16","","Trend Micro","Partner","https://success.trendmicro.com/dcx/s/?language=en_US","","domains","TrendMicroXDR","Trend Micro","Trend Vision One","The [Trend Vision One](https://www.trendmicro.com/en_us/business/products/detection-response/xdr.html) connector allows you to easily connect your Workbench alert data with Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities. This gives you more insight into your organization's networks/systems and improves your security operation capabilities.

The Trend Vision One connector is supported in Microsoft Sentinel in the following regions: Australia East, Australia Southeast, Brazil South, Canada Central, Canada East, Central India, Central US, East Asia, East US, East US 2, France Central, Japan East, Korea Central, North Central US, North Europe, Norway East, South Africa North, South Central US, Southeast Asia, Sweden Central, Switzerland North, UAE North, UK South, UK West, West Europe, West US, West US 2, West US 3.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Trend Vision One API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Trend Vision One API**\n\n [Follow these instructions](https://docs.trendmicro.com/documentation/article/trend-vision-one-api-keys-third-party-apps) to create an account and an API authentication token.""}, {""title"": """", ""description"": ""**STEP 2 - Use the below deployment option to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Trend Vision One connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Trend Vision One API Authorization Token, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template Deployment"", ""description"": ""This method provides an automated deployment of the Trend Vision One connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-trendmicroxdr-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter a unique **Function Name**, **Workspace ID**, **Workspace Key**, **API Token** and **Region Code**. \n - Note: Provide the appropriate region code based on where your Trend Vision One instance is deployed: us, eu, au, in, sg, jp \n - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. 
Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Trend Vision One API Token"", ""description"": ""A Trend Vision One API Token is required. 
See the documentation to learn more about the [Trend Vision One API](https://docs.trendmicro.com/documentation/article/trend-vision-one-api-keys-third-party-apps).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Trend%20Micro%20Vision%20One/Data%20Connectors/TrendMicroXDR.json","true" +"","UEBA Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/UEBA%20Essentials","azuresentinel","azure-sentinel-solution-uebaessentials","2022-06-27","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"","URLhaus","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/URLhaus","azuresentinel","azure-sentinel-solution-urlhaus","2022-09-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"Ubiquiti_CL","Ubiquiti UniFi","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Ubiquiti%20UniFi","azuresentinel","azure-sentinel-solution-ubiquitiunifi","2022-06-01","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","UbiquitiUnifi","Ubiquiti","[Deprecated] Ubiquiti UniFi","The [Ubiquiti UniFi](https://www.ui.com/) data connector provides the capability to ingest [Ubiquiti UniFi firewall, dns, ssh, AP events](https://help.ui.com/hc/en-us/articles/204959834-UniFi-How-to-View-Log-Files) into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected [**UbiquitiAuditEvent**](https://aka.ms/sentinel-UbiquitiUnifi-parser) which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using Enterprise System Controller Release Version: 5.6.2 (Syslog)"", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server to which the Ubiquiti logs are forwarder from Ubiquiti device (e.g.remote syslog server)\n\n> Logs from Ubiquiti Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Follow the configuration steps below to get Ubiquiti logs into Microsoft Sentinel. 
Refer to the [Azure Monitor Documentation](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-json) for more details on these steps.\n1. Configure log forwarding on your Ubiquiti controller: \n\n\t i. Go to Settings > System Setting > Controller Configuration > Remote Logging and enable the Syslog and Debugging (optional) logs (Refer to [User Guide](https://dl.ui.com/guides/UniFi/UniFi_Controller_V5_UG.pdf) for detailed instructions).\n2. Download config file [Ubiquiti.conf](https://aka.ms/sentinel-UbiquitiUnifi-conf).\n3. Login to the server where you have installed Azure Log Analytics agent.\n4. Copy Ubiquiti.conf to the /etc/opt/microsoft/omsagent/**workspace_id**/conf/omsagent.d/ folder.\n5. Edit Ubiquiti.conf as follows:\n\n\t i. specify port which you have set your Ubiquiti device to forward logs to (line 4)\n\n\t ii. replace **workspace_id** with real value of your Workspace ID (lines 14,15,16,19)\n5. Save changes and restart the Azure Log Analytics agent for Linux service with the following command:\n\t\tsudo /opt/microsoft/omsagent/bin/service_control restart"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Ubiquiti%20UniFi/Data%20Connectors/Connector_Ubiquiti_agent.json","true" +"ThreatIntelligenceIndicator","VMRay","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMRay","vmraygmbh1623334327435","microsoft-sentinel-solution-vmray","2025-07-23","","","VMRay","Partner","https://www.vmray.com/contact/customer-support/","","domains","VMRay","VMRay","VMRayThreatIntelligence","VMRayThreatIntelligence connector automatically generates and feeds threat intelligence for all submissions to VMRay, improving threat detection and incident response in Sentinel. This seamless integration empowers teams to proactively address emerging threats.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the VMRay API to pull VMRay Threat IOCs into Microsoft Sentinel. This might result in additional costs for data ingestion and for storing data in Azure Blob Storage costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) and [Azure Blob Storage pricing page](https://azure.microsoft.com/pricing/details/storage/blobs/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Deploy VMRay Threat Intelligence Connector"", ""description"": ""1. Ensure you have all the required prerequisites: **Client ID**, **Tenant ID**, **Client Secret**, **VMRay API Key**, and **VMRay Base URL**.\n2. To obtain the Client ID, Client Secret, and Tenant ID, [follow these instructions](https://github.com/Azure/Azure-Sentinel/tree/master/Solutions/VMRay#vmray-configurations)\n3. For the **Flex Consumption Plan**, click the **Deploy to Azure** button below:\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-VMRay-azuredeployflex)\n\n4. For the **Premium Plan**, click the **Deploy to Azure** button below:\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-VMRay-azuredeploypremium).""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Azure Subscription"", ""description"": ""Azure Subscription with owner role is required to register an application in Azure Active Directory and assign role of contributor to app in resource group.""}, {""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**VMRay API Key** is required.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMRay/Data%20Connectors/VMRayThreatIntelligence_FunctionApp.json","true" +"Syslog","VMWareESXi","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMWareESXi","azuresentinel","azure-sentinel-solution-vmwareesxi","2022-01-12","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","VMwareESXi","VMWare","[Deprecated] VMware ESXi","The [VMware ESXi](https://www.vmware.com/products/esxi-and-esx.html) connector allows you to easily connect your VMWare ESXi logs with Microsoft Sentinel. This gives you more insight into your organization's ESXi servers and improves your security operation capabilities.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias VMwareESXi and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMWareESXi/Parsers/VMwareESXi.yaml), on the second line of the query, enter the hostname(s) of your VMwareESXi device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n 1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n 2. Select **Apply below configuration to my machines** and select the facilities and severities.\n 3. Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}, {""title"": ""3. 
Configure and connect the VMware ESXi"", ""description"": ""1. Follow these instructions to configure the VMWare ESXi to forward syslog: \n - [VMware ESXi 3.5 and 4.x](https://kb.vmware.com/s/article/1016621) \n - [VMware ESXi 5.0+](https://docs.vmware.com/en/VMware-vSphere/5.5/com.vmware.vsphere.monitoring.doc/GUID-9F67DB52-F469-451F-B6C8-DAE8D95976E7.html)\n2. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""VMwareESXi"", ""description"": ""must be configured to export logs via Syslog""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMWareESXi/Data%20Connectors/Connector_Syslog_VMwareESXi.json","true" +"CarbonBlackAuditLogs_CL","VMware Carbon Black Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud","azuresentinel","azure-sentinel-solution-vmwarecarbonblack","2022-06-01","","","Microsoft","Microsoft","https://support.microsoft.com/","","domains","VMwareCarbonBlack","VMware","VMware Carbon Black Cloud","The [VMware Carbon Black Cloud](https://www.vmware.com/products/carbon-black-cloud.html) connector provides the capability to ingest Carbon Black data into Microsoft Sentinel. The connector provides visibility into Audit, Notification and Event logs in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to VMware Carbon Black to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the VMware Carbon Black API**\n\n [Follow these instructions](https://developer.carbonblack.com/reference/carbon-black-cloud/authentication/#creating-an-api-key) to create an API Key.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the VMware Carbon Black connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the VMware Carbon Black API Authorization Key(s), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""This method provides an automated deployment of the VMware Carbon Black connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelcarbonblackazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelcarbonblackazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the **Workspace ID**, **Workspace Key**, **Log Types**, **API ID(s)**, **API Key(s)**, **Carbon Black Org Key**, **S3 Bucket Name**, **AWS Access Key Id**, **AWS Secret Access Key**, **EventPrefixFolderName**,**AlertPrefixFolderName**, and validate the **URI**.\n> - Enter the URI that corresponds to your region. The complete list of API URLs can be [found here](https://community.carbonblack.com/t5/Knowledge-Base/PSC-What-URLs-are-used-to-access-the-APIs/ta-p/67346)\n - The default **Time Interval** is set to pull the last five (5) minutes of data. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. \n - Carbon Black requires a seperate set of API ID/Keys to ingest Notification alerts. Enter the SIEM API ID/Key values or leave blank, if not required. \n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the VMware Carbon Black connector manually with Azure Functions.""}, {""title"": """", ""description"": ""**1. Create a Function App**\n\n1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp), and select **+ Add**.\n2. In the **Basics** tab, ensure Runtime stack is set to **Powershell Core**. \n3. In the **Hosting** tab, ensure the **Consumption (Serverless)** plan type is selected.\n4. 
Make other preferrable configuration changes, if needed, then click **Create**.""}, {""title"": """", ""description"": ""**2. Import Function App Code**\n\n1. In the newly created Function App, select **Functions** on the left pane and click **+ Add**.\n2. Select **Timer Trigger**.\n3. Enter a unique Function **Name** and modify the cron schedule, if needed. The default value is set to run the Function App every 5 minutes. (Note: the Timer trigger should match the `timeInterval` value below to prevent overlapping data), click **Create**.\n4. Click on **Code + Test** on the left pane. \n5. Copy the [Function App Code](https://aka.ms/sentinelcarbonblackazurefunctioncode) and paste into the Function App `run.ps1` editor.\n5. Click **Save**.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following thirteen to sixteen (13-16) application settings individually, with their respective string values (case-sensitive): \n\t\tapiId\n\t\tapiKey\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\ttimeInterval\n\t\tCarbonBlackOrgKey\n\t\tCarbonBlackLogTypes \n\t\ts3BucketName \n\t\tEventPrefixFolderName \n\t\tAlertPrefixFolderName \n\t\tAWSAccessKeyId \n\t\tAWSSecretAccessKey \n\t\tSIEMapiId (Optional)\n\t\tSIEMapiKey (Optional)\n\t\tlogAnalyticsUri (optional) \n> - Enter the URI that corresponds to your region. The complete list of API URLs can be [found here](https://community.carbonblack.com/t5/Knowledge-Base/PSC-What-URLs-are-used-to-access-the-APIs/ta-p/67346). 
The `uri` value must follow the following schema: `https://.conferdeploy.net` - There is no need to add a time suffix to the URI, the Function App will dynamically append the Time Value to the URI in the proper format.\n> - Set the `timeInterval` (in minutes) to the default value of `5` to correspond to the default Timer Trigger of every `5` minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly to prevent overlapping data ingestion.\n> - Carbon Black requires a seperate set of API ID/Keys to ingest Notification alerts. Enter the `SIEMapiId` and `SIEMapiKey` values, if needed, or omit, if not required. \n> - Note: If using Azure Key Vault, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""VMware Carbon Black API Key(s)"", ""description"": ""Carbon Black API and/or SIEM Level API Key(s) are required. See the documentation to learn more about the [Carbon Black API](https://developer.carbonblack.com/reference/carbon-black-cloud/cb-defense/latest/rest-api/).\n - A Carbon Black **API** access level API ID and Key is required for [Audit](https://developer.carbonblack.com/reference/carbon-black-cloud/cb-defense/latest/rest-api/#audit-log-events) and [Event](https://developer.carbonblack.com/reference/carbon-black-cloud/platform/latest/data-forwarder-config-api/) logs. 
\n - A Carbon Black **SIEM** access level API ID and Key is required for [Notification](https://developer.carbonblack.com/reference/carbon-black-cloud/cb-defense/latest/rest-api/#notifications) alerts.""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name**, **Folder Name in AWS S3 Bucket** are required for Amazon S3 REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/VMwareCarbonBlack_API_FunctionApp.json","true" +"CarbonBlackEvents_CL","VMware Carbon Black Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud","azuresentinel","azure-sentinel-solution-vmwarecarbonblack","2022-06-01","","","Microsoft","Microsoft","https://support.microsoft.com/","","domains","VMwareCarbonBlack","VMware","VMware Carbon Black Cloud","The [VMware Carbon Black Cloud](https://www.vmware.com/products/carbon-black-cloud.html) connector provides the capability to ingest Carbon Black data into Microsoft Sentinel. The connector provides visibility into Audit, Notification and Event logs in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to VMware Carbon Black to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the VMware Carbon Black API**\n\n [Follow these instructions](https://developer.carbonblack.com/reference/carbon-black-cloud/authentication/#creating-an-api-key) to create an API Key.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the VMware Carbon Black connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the VMware Carbon Black API Authorization Key(s), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""This method provides an automated deployment of the VMware Carbon Black connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelcarbonblackazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelcarbonblackazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **Log Types**, **API ID(s)**, **API Key(s)**, **Carbon Black Org Key**, **S3 Bucket Name**, **AWS Access Key Id**, **AWS Secret Access Key**, **EventPrefixFolderName**,**AlertPrefixFolderName**, and validate the **URI**.\n> - Enter the URI that corresponds to your region. 
The complete list of API URLs can be [found here](https://community.carbonblack.com/t5/Knowledge-Base/PSC-What-URLs-are-used-to-access-the-APIs/ta-p/67346)\n - The default **Time Interval** is set to pull the last five (5) minutes of data. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. \n - Carbon Black requires a seperate set of API ID/Keys to ingest Notification alerts. Enter the SIEM API ID/Key values or leave blank, if not required. \n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the VMware Carbon Black connector manually with Azure Functions.""}, {""title"": """", ""description"": ""**1. Create a Function App**\n\n1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp), and select **+ Add**.\n2. In the **Basics** tab, ensure Runtime stack is set to **Powershell Core**. \n3. In the **Hosting** tab, ensure the **Consumption (Serverless)** plan type is selected.\n4. Make other preferrable configuration changes, if needed, then click **Create**.""}, {""title"": """", ""description"": ""**2. Import Function App Code**\n\n1. In the newly created Function App, select **Functions** on the left pane and click **+ Add**.\n2. Select **Timer Trigger**.\n3. 
Enter a unique Function **Name** and modify the cron schedule, if needed. The default value is set to run the Function App every 5 minutes. (Note: the Timer trigger should match the `timeInterval` value below to prevent overlapping data), click **Create**.\n4. Click on **Code + Test** on the left pane. \n5. Copy the [Function App Code](https://aka.ms/sentinelcarbonblackazurefunctioncode) and paste into the Function App `run.ps1` editor.\n5. Click **Save**.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following thirteen to sixteen (13-16) application settings individually, with their respective string values (case-sensitive): \n\t\tapiId\n\t\tapiKey\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\ttimeInterval\n\t\tCarbonBlackOrgKey\n\t\tCarbonBlackLogTypes \n\t\ts3BucketName \n\t\tEventPrefixFolderName \n\t\tAlertPrefixFolderName \n\t\tAWSAccessKeyId \n\t\tAWSSecretAccessKey \n\t\tSIEMapiId (Optional)\n\t\tSIEMapiKey (Optional)\n\t\tlogAnalyticsUri (optional) \n> - Enter the URI that corresponds to your region. The complete list of API URLs can be [found here](https://community.carbonblack.com/t5/Knowledge-Base/PSC-What-URLs-are-used-to-access-the-APIs/ta-p/67346). The `uri` value must follow the following schema: `https://.conferdeploy.net` - There is no need to add a time suffix to the URI, the Function App will dynamically append the Time Value to the URI in the proper format.\n> - Set the `timeInterval` (in minutes) to the default value of `5` to correspond to the default Timer Trigger of every `5` minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly to prevent overlapping data ingestion.\n> - Carbon Black requires a seperate set of API ID/Keys to ingest Notification alerts. 
Enter the `SIEMapiId` and `SIEMapiKey` values, if needed, or omit, if not required. \n> - Note: If using Azure Key Vault, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""VMware Carbon Black API Key(s)"", ""description"": ""Carbon Black API and/or SIEM Level API Key(s) are required. 
See the documentation to learn more about the [Carbon Black API](https://developer.carbonblack.com/reference/carbon-black-cloud/cb-defense/latest/rest-api/).\n - A Carbon Black **API** access level API ID and Key is required for [Audit](https://developer.carbonblack.com/reference/carbon-black-cloud/cb-defense/latest/rest-api/#audit-log-events) and [Event](https://developer.carbonblack.com/reference/carbon-black-cloud/platform/latest/data-forwarder-config-api/) logs. \n - A Carbon Black **SIEM** access level API ID and Key is required for [Notification](https://developer.carbonblack.com/reference/carbon-black-cloud/cb-defense/latest/rest-api/#notifications) alerts.""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name**, **Folder Name in AWS S3 Bucket** are required for Amazon S3 REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/VMwareCarbonBlack_API_FunctionApp.json","true" +"CarbonBlackNotifications_CL","VMware Carbon Black Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud","azuresentinel","azure-sentinel-solution-vmwarecarbonblack","2022-06-01","","","Microsoft","Microsoft","https://support.microsoft.com/","","domains","VMwareCarbonBlack","VMware","VMware Carbon Black Cloud","The [VMware Carbon Black Cloud](https://www.vmware.com/products/carbon-black-cloud.html) connector provides the capability to ingest Carbon Black data into Microsoft Sentinel. The connector provides visibility into Audit, Notification and Event logs in Microsoft Sentinel to view dashboards, create custom alerts, and to improve monitoring and investigation capabilities.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to VMware Carbon Black to pull its logs into Microsoft Sentinel. 
This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the VMware Carbon Black API**\n\n [Follow these instructions](https://developer.carbonblack.com/reference/carbon-black-cloud/authentication/#creating-an-api-key) to create an API Key.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the VMware Carbon Black connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the VMware Carbon Black API Authorization Key(s), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""This method provides an automated deployment of the VMware Carbon Black connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelcarbonblackazuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinelcarbonblackazuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. 
\n3. Enter the **Workspace ID**, **Workspace Key**, **Log Types**, **API ID(s)**, **API Key(s)**, **Carbon Black Org Key**, **S3 Bucket Name**, **AWS Access Key Id**, **AWS Secret Access Key**, **EventPrefixFolderName**,**AlertPrefixFolderName**, and validate the **URI**.\n> - Enter the URI that corresponds to your region. The complete list of API URLs can be [found here](https://community.carbonblack.com/t5/Knowledge-Base/PSC-What-URLs-are-used-to-access-the-APIs/ta-p/67346)\n - The default **Time Interval** is set to pull the last five (5) minutes of data. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly (in the function.json file, post deployment) to prevent overlapping data ingestion. \n - Carbon Black requires a seperate set of API ID/Keys to ingest Notification alerts. Enter the SIEM API ID/Key values or leave blank, if not required. \n> - Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the VMware Carbon Black connector manually with Azure Functions.""}, {""title"": """", ""description"": ""**1. Create a Function App**\n\n1. From the Azure Portal, navigate to [Function App](https://portal.azure.com/#blade/HubsExtension/BrowseResource/resourceType/Microsoft.Web%2Fsites/kind/functionapp), and select **+ Add**.\n2. In the **Basics** tab, ensure Runtime stack is set to **Powershell Core**. \n3. 
In the **Hosting** tab, ensure the **Consumption (Serverless)** plan type is selected.\n4. Make other preferrable configuration changes, if needed, then click **Create**.""}, {""title"": """", ""description"": ""**2. Import Function App Code**\n\n1. In the newly created Function App, select **Functions** on the left pane and click **+ Add**.\n2. Select **Timer Trigger**.\n3. Enter a unique Function **Name** and modify the cron schedule, if needed. The default value is set to run the Function App every 5 minutes. (Note: the Timer trigger should match the `timeInterval` value below to prevent overlapping data), click **Create**.\n4. Click on **Code + Test** on the left pane. \n5. Copy the [Function App Code](https://aka.ms/sentinelcarbonblackazurefunctioncode) and paste into the Function App `run.ps1` editor.\n5. Click **Save**.""}, {""title"": """", ""description"": ""**3. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following thirteen to sixteen (13-16) application settings individually, with their respective string values (case-sensitive): \n\t\tapiId\n\t\tapiKey\n\t\tworkspaceID\n\t\tworkspaceKey\n\t\turi\n\t\ttimeInterval\n\t\tCarbonBlackOrgKey\n\t\tCarbonBlackLogTypes \n\t\ts3BucketName \n\t\tEventPrefixFolderName \n\t\tAlertPrefixFolderName \n\t\tAWSAccessKeyId \n\t\tAWSSecretAccessKey \n\t\tSIEMapiId (Optional)\n\t\tSIEMapiKey (Optional)\n\t\tlogAnalyticsUri (optional) \n> - Enter the URI that corresponds to your region. The complete list of API URLs can be [found here](https://community.carbonblack.com/t5/Knowledge-Base/PSC-What-URLs-are-used-to-access-the-APIs/ta-p/67346). 
The `uri` value must follow the following schema: `https://.conferdeploy.net` - There is no need to add a time suffix to the URI, the Function App will dynamically append the Time Value to the URI in the proper format.\n> - Set the `timeInterval` (in minutes) to the default value of `5` to correspond to the default Timer Trigger of every `5` minutes. If the time interval needs to be modified, it is recommended to change the Function App Timer Trigger accordingly to prevent overlapping data ingestion.\n> - Carbon Black requires a seperate set of API ID/Keys to ingest Notification alerts. Enter the `SIEMapiId` and `SIEMapiKey` values, if needed, or omit, if not required. \n> - Note: If using Azure Key Vault, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""VMware Carbon Black API Key(s)"", ""description"": ""Carbon Black API and/or SIEM Level API Key(s) are required. See the documentation to learn more about the [Carbon Black API](https://developer.carbonblack.com/reference/carbon-black-cloud/cb-defense/latest/rest-api/).\n - A Carbon Black **API** access level API ID and Key is required for [Audit](https://developer.carbonblack.com/reference/carbon-black-cloud/cb-defense/latest/rest-api/#audit-log-events) and [Event](https://developer.carbonblack.com/reference/carbon-black-cloud/platform/latest/data-forwarder-config-api/) logs. 
\n - A Carbon Black **SIEM** access level API ID and Key is required for [Notification](https://developer.carbonblack.com/reference/carbon-black-cloud/cb-defense/latest/rest-api/#notifications) alerts.""}, {""name"": ""Amazon S3 REST API Credentials/permissions"", ""description"": ""**AWS Access Key Id**, **AWS Secret Access Key**, **AWS S3 Bucket Name**, **Folder Name in AWS S3 Bucket** are required for Amazon S3 REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/VMwareCarbonBlack_API_FunctionApp.json","true" +"ASimAuthenticationEventLogs","VMware Carbon Black Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud","azuresentinel","azure-sentinel-solution-vmwarecarbonblack","2022-06-01","","","Microsoft","Microsoft","https://support.microsoft.com/","","domains","carbonBlackAWSS3","Microsoft","VMware Carbon Black Cloud via AWS S3","The [VMware Carbon Black Cloud](https://www.vmware.com/products/carbon-black-cloud.html) via AWS S3 data connector provides the capability to ingest watchlist, alerts, auth and endpoints events via AWS S3 and stream them to ASIM normalized tables. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. AWS CloudFormation Deployment \n To configure access on AWS, two templates has been generated to set up the AWS environment to send logs from S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create) \n 2. In AWS, choose the 'Upload a template file' option and click on 'Choose file'. Select the downloaded template \n 3. 
Click 'Next' and 'Create stack'""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS Carbon Black resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CarbonBlack""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""When deploying 'Template 2: AWS Carbon Black resources deployment' template you'll need supply a few parameters \n * **Stack Name**: A stack name of your choosing (will appear in the list of stacks in AWS)\n * **Role Name**: Must begin with 'OIDC_' prefix, has a default value. \n * **Bucket Name**: Bucket name of your choosing, if you already have an existing bucket paste the name here \n * **CreateNewBucket**: If you already have an existing bucket that you would like to use for this connector select 'false' for this option, otherwise a bucket with the name you entered in 'Bucket Name' will be created from this stack. \n * **Region**: This is the region of the AWS resources based on Carbon Black's mapping - for more information please see [Carbon Black documentation](https://developer.carbonblack.com/reference/carbon-black-cloud/integrations/data-forwarder/quick-setup/#create-a-bucket).\n * **SQSQueuePrefix**: The stack create multiple queues, this prefix will be added to each one of them. \n * **WorkspaceID**: Use the Workspace ID provided below.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Once the deployment is complete - head to the 'Outputs' tab, you will see: Role ARN, S3 bucket and 4 SQS resources created. You will need those resources in the next step when configuring Carbon Black's data forwarders and the data connector.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. 
Carbon Black data forwarder configuration \n After all AWS resources has been created you'll need to configure Carbon Black to forward the events to the AWS buckets for Microsoft Sentinel to ingest them. Follow [Carbon Black's documentation on how to create a 'Data Forwarders'](https://developer.carbonblack.com/reference/carbon-black-cloud/integrations/data-forwarder/quick-setup/#2-create-a-forwarder) Use the first recommended option. When asked to input a bucket name use the bucket created in the previous step. \n You will be required to add 'S3 prefix' for each forwarder, please use this mapping:""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Alert | carbon-black-cloud-forwarder/Alerts |\n | Auth Events | carbon-black-cloud-forwarder/Auth |\n | Endpoint Events | carbon-black-cloud-forwarder/Endpoint |\n | Watchlist Hit | carbon-black-cloud-forwarder/Watchlist |""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2.1. Test your data forwarder (Optional) \n To validate the data forwarder is configured as expected, in Carbon Black's portal search for the data forwarder that you just created and click on 'Test Forwarder' button under the 'Actions' column, this will generate a 'HealthCheck' file in the S3 Bucket, you should see it appear immediately.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the 'Add new collector' button, fill the required information, the ARN role and the SQS URL are created in step 1, note that you will need to enter the correct SQS URL and select the appropriate event type from the dropdown, for example if you want to ingest Alert events you will need to copy the Alerts SQS URL and select the 'Alerts' event type in the dropdown""}}]}, {""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CarbonBlackAlertsStream"", ""text"": ""Alerts""}, {""key"": ""Custom-CarbonBlackAuthStream"", ""text"": ""Auth Events""}, {""key"": ""Custom-CarbonBlackEndpointStream"", ""text"": ""Endpoint Events""}, {""key"": ""Custom-CarbonBlackWatchlistStream"", ""text"": ""Watchlist""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": 
""write permission."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Environment"", ""description"": ""You must have the following AWS resources defined and configured: S3, Simple Queue Service (SQS), IAM roles and permissions policies""}, {""name"": ""Environment"", ""description"": ""You must have the a Carbon black account and required permissions to create a Data Forwarded to AWS S3 buckets. \nFor more details visit [Carbon Black Data Forwarder Docs](https://docs.vmware.com/en/VMware-Carbon-Black-Cloud/services/carbon-black-cloud-user-guide/GUID-E8D33F72-BABB-4157-A908-D8BBDB5AF349.html)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/CarbonBlackViaAWSS3_ConnectorDefinition.json;https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/VMwareCarbonBlackCloud_ccp/CarbonBlack_DataConnectorDefination.json","false" +"ASimFileEventLogs","VMware Carbon Black Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud","azuresentinel","azure-sentinel-solution-vmwarecarbonblack","2022-06-01","","","Microsoft","Microsoft","https://support.microsoft.com/","","domains","carbonBlackAWSS3","Microsoft","VMware Carbon Black Cloud via AWS S3","The [VMware Carbon Black Cloud](https://www.vmware.com/products/carbon-black-cloud.html) via AWS S3 data connector provides the capability to ingest watchlist, alerts, auth and endpoints events via AWS S3 and stream them to ASIM normalized tables. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. 
AWS CloudFormation Deployment \n To configure access on AWS, two templates has been generated to set up the AWS environment to send logs from S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create) \n 2. In AWS, choose the 'Upload a template file' option and click on 'Choose file'. Select the downloaded template \n 3. Click 'Next' and 'Create stack'""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS Carbon Black resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CarbonBlack""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""When deploying 'Template 2: AWS Carbon Black resources deployment' template you'll need supply a few parameters \n * **Stack Name**: A stack name of your choosing (will appear in the list of stacks in AWS)\n * **Role Name**: Must begin with 'OIDC_' prefix, has a default value. \n * **Bucket Name**: Bucket name of your choosing, if you already have an existing bucket paste the name here \n * **CreateNewBucket**: If you already have an existing bucket that you would like to use for this connector select 'false' for this option, otherwise a bucket with the name you entered in 'Bucket Name' will be created from this stack. \n * **Region**: This is the region of the AWS resources based on Carbon Black's mapping - for more information please see [Carbon Black documentation](https://developer.carbonblack.com/reference/carbon-black-cloud/integrations/data-forwarder/quick-setup/#create-a-bucket).\n * **SQSQueuePrefix**: The stack create multiple queues, this prefix will be added to each one of them. 
\n * **WorkspaceID**: Use the Workspace ID provided below.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Once the deployment is complete - head to the 'Outputs' tab, you will see: Role ARN, S3 bucket and 4 SQS resources created. You will need those resources in the next step when configuring Carbon Black's data forwarders and the data connector.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Carbon Black data forwarder configuration \n After all AWS resources has been created you'll need to configure Carbon Black to forward the events to the AWS buckets for Microsoft Sentinel to ingest them. Follow [Carbon Black's documentation on how to create a 'Data Forwarders'](https://developer.carbonblack.com/reference/carbon-black-cloud/integrations/data-forwarder/quick-setup/#2-create-a-forwarder) Use the first recommended option. When asked to input a bucket name use the bucket created in the previous step. \n You will be required to add 'S3 prefix' for each forwarder, please use this mapping:""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Alert | carbon-black-cloud-forwarder/Alerts |\n | Auth Events | carbon-black-cloud-forwarder/Auth |\n | Endpoint Events | carbon-black-cloud-forwarder/Endpoint |\n | Watchlist Hit | carbon-black-cloud-forwarder/Watchlist |""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2.1. Test your data forwarder (Optional) \n To validate the data forwarder is configured as expected, in Carbon Black's portal search for the data forwarder that you just created and click on 'Test Forwarder' button under the 'Actions' column, this will generate a 'HealthCheck' file in the S3 Bucket, you should see it appear immediately.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the 'Add new collector' button, fill the required information, the ARN role and the SQS URL are created in step 1, note that you will need to enter the correct SQS URL and select the appropriate event type from the dropdown, for example if you want to ingest Alert events you will need to copy the Alerts SQS URL and select the 'Alerts' event type in the dropdown""}}]}, {""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CarbonBlackAlertsStream"", ""text"": ""Alerts""}, {""key"": ""Custom-CarbonBlackAuthStream"", ""text"": ""Auth Events""}, {""key"": ""Custom-CarbonBlackEndpointStream"", ""text"": ""Endpoint Events""}, {""key"": ""Custom-CarbonBlackWatchlistStream"", ""text"": ""Watchlist""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": 
""write permission."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Environment"", ""description"": ""You must have the following AWS resources defined and configured: S3, Simple Queue Service (SQS), IAM roles and permissions policies""}, {""name"": ""Environment"", ""description"": ""You must have the a Carbon black account and required permissions to create a Data Forwarded to AWS S3 buckets. \nFor more details visit [Carbon Black Data Forwarder Docs](https://docs.vmware.com/en/VMware-Carbon-Black-Cloud/services/carbon-black-cloud-user-guide/GUID-E8D33F72-BABB-4157-A908-D8BBDB5AF349.html)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/CarbonBlackViaAWSS3_ConnectorDefinition.json;https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/VMwareCarbonBlackCloud_ccp/CarbonBlack_DataConnectorDefination.json","false" +"ASimNetworkSessionLogs","VMware Carbon Black Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud","azuresentinel","azure-sentinel-solution-vmwarecarbonblack","2022-06-01","","","Microsoft","Microsoft","https://support.microsoft.com/","","domains","carbonBlackAWSS3","Microsoft","VMware Carbon Black Cloud via AWS S3","The [VMware Carbon Black Cloud](https://www.vmware.com/products/carbon-black-cloud.html) via AWS S3 data connector provides the capability to ingest watchlist, alerts, auth and endpoints events via AWS S3 and stream them to ASIM normalized tables. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. 
AWS CloudFormation Deployment \n To configure access on AWS, two templates has been generated to set up the AWS environment to send logs from S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create) \n 2. In AWS, choose the 'Upload a template file' option and click on 'Choose file'. Select the downloaded template \n 3. Click 'Next' and 'Create stack'""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS Carbon Black resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CarbonBlack""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""When deploying 'Template 2: AWS Carbon Black resources deployment' template you'll need supply a few parameters \n * **Stack Name**: A stack name of your choosing (will appear in the list of stacks in AWS)\n * **Role Name**: Must begin with 'OIDC_' prefix, has a default value. \n * **Bucket Name**: Bucket name of your choosing, if you already have an existing bucket paste the name here \n * **CreateNewBucket**: If you already have an existing bucket that you would like to use for this connector select 'false' for this option, otherwise a bucket with the name you entered in 'Bucket Name' will be created from this stack. \n * **Region**: This is the region of the AWS resources based on Carbon Black's mapping - for more information please see [Carbon Black documentation](https://developer.carbonblack.com/reference/carbon-black-cloud/integrations/data-forwarder/quick-setup/#create-a-bucket).\n * **SQSQueuePrefix**: The stack create multiple queues, this prefix will be added to each one of them. 
\n * **WorkspaceID**: Use the Workspace ID provided below.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Once the deployment is complete - head to the 'Outputs' tab, you will see: Role ARN, S3 bucket and 4 SQS resources created. You will need those resources in the next step when configuring Carbon Black's data forwarders and the data connector.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Carbon Black data forwarder configuration \n After all AWS resources has been created you'll need to configure Carbon Black to forward the events to the AWS buckets for Microsoft Sentinel to ingest them. Follow [Carbon Black's documentation on how to create a 'Data Forwarders'](https://developer.carbonblack.com/reference/carbon-black-cloud/integrations/data-forwarder/quick-setup/#2-create-a-forwarder) Use the first recommended option. When asked to input a bucket name use the bucket created in the previous step. \n You will be required to add 'S3 prefix' for each forwarder, please use this mapping:""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Alert | carbon-black-cloud-forwarder/Alerts |\n | Auth Events | carbon-black-cloud-forwarder/Auth |\n | Endpoint Events | carbon-black-cloud-forwarder/Endpoint |\n | Watchlist Hit | carbon-black-cloud-forwarder/Watchlist |""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2.1. Test your data forwarder (Optional) \n To validate the data forwarder is configured as expected, in Carbon Black's portal search for the data forwarder that you just created and click on 'Test Forwarder' button under the 'Actions' column, this will generate a 'HealthCheck' file in the S3 Bucket, you should see it appear immediately.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the 'Add new collector' button, fill the required information, the ARN role and the SQS URL are created in step 1, note that you will need to enter the correct SQS URL and select the appropriate event type from the dropdown, for example if you want to ingest Alert events you will need to copy the Alerts SQS URL and select the 'Alerts' event type in the dropdown""}}]}, {""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CarbonBlackAlertsStream"", ""text"": ""Alerts""}, {""key"": ""Custom-CarbonBlackAuthStream"", ""text"": ""Auth Events""}, {""key"": ""Custom-CarbonBlackEndpointStream"", ""text"": ""Endpoint Events""}, {""key"": ""Custom-CarbonBlackWatchlistStream"", ""text"": ""Watchlist""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": 
""write permission."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Environment"", ""description"": ""You must have the following AWS resources defined and configured: S3, Simple Queue Service (SQS), IAM roles and permissions policies""}, {""name"": ""Environment"", ""description"": ""You must have the a Carbon black account and required permissions to create a Data Forwarded to AWS S3 buckets. \nFor more details visit [Carbon Black Data Forwarder Docs](https://docs.vmware.com/en/VMware-Carbon-Black-Cloud/services/carbon-black-cloud-user-guide/GUID-E8D33F72-BABB-4157-A908-D8BBDB5AF349.html)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/CarbonBlackViaAWSS3_ConnectorDefinition.json;https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/VMwareCarbonBlackCloud_ccp/CarbonBlack_DataConnectorDefination.json","false" +"ASimProcessEventLogs","VMware Carbon Black Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud","azuresentinel","azure-sentinel-solution-vmwarecarbonblack","2022-06-01","","","Microsoft","Microsoft","https://support.microsoft.com/","","domains","carbonBlackAWSS3","Microsoft","VMware Carbon Black Cloud via AWS S3","The [VMware Carbon Black Cloud](https://www.vmware.com/products/carbon-black-cloud.html) via AWS S3 data connector provides the capability to ingest watchlist, alerts, auth and endpoints events via AWS S3 and stream them to ASIM normalized tables. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. 
AWS CloudFormation Deployment \n To configure access on AWS, two templates has been generated to set up the AWS environment to send logs from S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create) \n 2. In AWS, choose the 'Upload a template file' option and click on 'Choose file'. Select the downloaded template \n 3. Click 'Next' and 'Create stack'""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS Carbon Black resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CarbonBlack""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""When deploying 'Template 2: AWS Carbon Black resources deployment' template you'll need supply a few parameters \n * **Stack Name**: A stack name of your choosing (will appear in the list of stacks in AWS)\n * **Role Name**: Must begin with 'OIDC_' prefix, has a default value. \n * **Bucket Name**: Bucket name of your choosing, if you already have an existing bucket paste the name here \n * **CreateNewBucket**: If you already have an existing bucket that you would like to use for this connector select 'false' for this option, otherwise a bucket with the name you entered in 'Bucket Name' will be created from this stack. \n * **Region**: This is the region of the AWS resources based on Carbon Black's mapping - for more information please see [Carbon Black documentation](https://developer.carbonblack.com/reference/carbon-black-cloud/integrations/data-forwarder/quick-setup/#create-a-bucket).\n * **SQSQueuePrefix**: The stack create multiple queues, this prefix will be added to each one of them. 
\n * **WorkspaceID**: Use the Workspace ID provided below.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Once the deployment is complete - head to the 'Outputs' tab, you will see: Role ARN, S3 bucket and 4 SQS resources created. You will need those resources in the next step when configuring Carbon Black's data forwarders and the data connector.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Carbon Black data forwarder configuration \n After all AWS resources has been created you'll need to configure Carbon Black to forward the events to the AWS buckets for Microsoft Sentinel to ingest them. Follow [Carbon Black's documentation on how to create a 'Data Forwarders'](https://developer.carbonblack.com/reference/carbon-black-cloud/integrations/data-forwarder/quick-setup/#2-create-a-forwarder) Use the first recommended option. When asked to input a bucket name use the bucket created in the previous step. \n You will be required to add 'S3 prefix' for each forwarder, please use this mapping:""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Alert | carbon-black-cloud-forwarder/Alerts |\n | Auth Events | carbon-black-cloud-forwarder/Auth |\n | Endpoint Events | carbon-black-cloud-forwarder/Endpoint |\n | Watchlist Hit | carbon-black-cloud-forwarder/Watchlist |""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2.1. Test your data forwarder (Optional) \n To validate the data forwarder is configured as expected, in Carbon Black's portal search for the data forwarder that you just created and click on 'Test Forwarder' button under the 'Actions' column, this will generate a 'HealthCheck' file in the S3 Bucket, you should see it appear immediately.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the 'Add new collector' button, fill the required information, the ARN role and the SQS URL are created in step 1, note that you will need to enter the correct SQS URL and select the appropriate event type from the dropdown, for example if you want to ingest Alert events you will need to copy the Alerts SQS URL and select the 'Alerts' event type in the dropdown""}}]}, {""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CarbonBlackAlertsStream"", ""text"": ""Alerts""}, {""key"": ""Custom-CarbonBlackAuthStream"", ""text"": ""Auth Events""}, {""key"": ""Custom-CarbonBlackEndpointStream"", ""text"": ""Endpoint Events""}, {""key"": ""Custom-CarbonBlackWatchlistStream"", ""text"": ""Watchlist""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": 
""write permission."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Environment"", ""description"": ""You must have the following AWS resources defined and configured: S3, Simple Queue Service (SQS), IAM roles and permissions policies""}, {""name"": ""Environment"", ""description"": ""You must have the a Carbon black account and required permissions to create a Data Forwarded to AWS S3 buckets. \nFor more details visit [Carbon Black Data Forwarder Docs](https://docs.vmware.com/en/VMware-Carbon-Black-Cloud/services/carbon-black-cloud-user-guide/GUID-E8D33F72-BABB-4157-A908-D8BBDB5AF349.html)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/CarbonBlackViaAWSS3_ConnectorDefinition.json;https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/VMwareCarbonBlackCloud_ccp/CarbonBlack_DataConnectorDefination.json","false" +"ASimRegistryEventLogs","VMware Carbon Black Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud","azuresentinel","azure-sentinel-solution-vmwarecarbonblack","2022-06-01","","","Microsoft","Microsoft","https://support.microsoft.com/","","domains","carbonBlackAWSS3","Microsoft","VMware Carbon Black Cloud via AWS S3","The [VMware Carbon Black Cloud](https://www.vmware.com/products/carbon-black-cloud.html) via AWS S3 data connector provides the capability to ingest watchlist, alerts, auth and endpoints events via AWS S3 and stream them to ASIM normalized tables. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. 
AWS CloudFormation Deployment \n To configure access on AWS, two templates have been generated to set up the AWS environment to send logs from S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create) \n 2. In AWS, choose the 'Upload a template file' option and click on 'Choose file'. Select the downloaded template \n 3. Click 'Next' and 'Create stack'""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS Carbon Black resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CarbonBlack""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""When deploying 'Template 2: AWS Carbon Black resources deployment' template you'll need to supply a few parameters \n * **Stack Name**: A stack name of your choosing (will appear in the list of stacks in AWS)\n * **Role Name**: Must begin with 'OIDC_' prefix, has a default value. \n * **Bucket Name**: Bucket name of your choosing, if you already have an existing bucket paste the name here \n * **CreateNewBucket**: If you already have an existing bucket that you would like to use for this connector select 'false' for this option, otherwise a bucket with the name you entered in 'Bucket Name' will be created from this stack. \n * **Region**: This is the region of the AWS resources based on Carbon Black's mapping - for more information please see [Carbon Black documentation](https://developer.carbonblack.com/reference/carbon-black-cloud/integrations/data-forwarder/quick-setup/#create-a-bucket).\n * **SQSQueuePrefix**: The stack creates multiple queues, this prefix will be added to each one of them. 
\n * **WorkspaceID**: Use the Workspace ID provided below.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Once the deployment is complete - head to the 'Outputs' tab, you will see: Role ARN, S3 bucket and 4 SQS resources created. You will need those resources in the next step when configuring Carbon Black's data forwarders and the data connector.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Carbon Black data forwarder configuration \n After all AWS resources have been created you'll need to configure Carbon Black to forward the events to the AWS buckets for Microsoft Sentinel to ingest them. Follow [Carbon Black's documentation on how to create a 'Data Forwarders'](https://developer.carbonblack.com/reference/carbon-black-cloud/integrations/data-forwarder/quick-setup/#2-create-a-forwarder) Use the first recommended option. When asked to input a bucket name use the bucket created in the previous step. \n You will be required to add 'S3 prefix' for each forwarder, please use this mapping:""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Alert | carbon-black-cloud-forwarder/Alerts |\n | Auth Events | carbon-black-cloud-forwarder/Auth |\n | Endpoint Events | carbon-black-cloud-forwarder/Endpoint |\n | Watchlist Hit | carbon-black-cloud-forwarder/Watchlist |""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2.1. Test your data forwarder (Optional) \n To validate the data forwarder is configured as expected, in Carbon Black's portal search for the data forwarder that you just created and click on 'Test Forwarder' button under the 'Actions' column, this will generate a 'HealthCheck' file in the S3 Bucket, you should see it appear immediately.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the 'Add new collector' button, fill the required information, the ARN role and the SQS URL are created in step 1, note that you will need to enter the correct SQS URL and select the appropriate event type from the dropdown, for example if you want to ingest Alert events you will need to copy the Alerts SQS URL and select the 'Alerts' event type in the dropdown""}}]}, {""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CarbonBlackAlertsStream"", ""text"": ""Alerts""}, {""key"": ""Custom-CarbonBlackAuthStream"", ""text"": ""Auth Events""}, {""key"": ""Custom-CarbonBlackEndpointStream"", ""text"": ""Endpoint Events""}, {""key"": ""Custom-CarbonBlackWatchlistStream"", ""text"": ""Watchlist""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": 
""write permission."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Environment"", ""description"": ""You must have the following AWS resources defined and configured: S3, Simple Queue Service (SQS), IAM roles and permissions policies""}, {""name"": ""Environment"", ""description"": ""You must have the a Carbon black account and required permissions to create a Data Forwarded to AWS S3 buckets. \nFor more details visit [Carbon Black Data Forwarder Docs](https://docs.vmware.com/en/VMware-Carbon-Black-Cloud/services/carbon-black-cloud-user-guide/GUID-E8D33F72-BABB-4157-A908-D8BBDB5AF349.html)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/CarbonBlackViaAWSS3_ConnectorDefinition.json;https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/VMwareCarbonBlackCloud_ccp/CarbonBlack_DataConnectorDefination.json","false" +"CarbonBlack_Alerts_CL","VMware Carbon Black Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud","azuresentinel","azure-sentinel-solution-vmwarecarbonblack","2022-06-01","","","Microsoft","Microsoft","https://support.microsoft.com/","","domains","carbonBlackAWSS3","Microsoft","VMware Carbon Black Cloud via AWS S3","The [VMware Carbon Black Cloud](https://www.vmware.com/products/carbon-black-cloud.html) via AWS S3 data connector provides the capability to ingest watchlist, alerts, auth and endpoints events via AWS S3 and stream them to ASIM normalized tables. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. 
AWS CloudFormation Deployment \n To configure access on AWS, two templates have been generated to set up the AWS environment to send logs from S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create) \n 2. In AWS, choose the 'Upload a template file' option and click on 'Choose file'. Select the downloaded template \n 3. Click 'Next' and 'Create stack'""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS Carbon Black resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CarbonBlack""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""When deploying 'Template 2: AWS Carbon Black resources deployment' template you'll need to supply a few parameters \n * **Stack Name**: A stack name of your choosing (will appear in the list of stacks in AWS)\n * **Role Name**: Must begin with 'OIDC_' prefix, has a default value. \n * **Bucket Name**: Bucket name of your choosing, if you already have an existing bucket paste the name here \n * **CreateNewBucket**: If you already have an existing bucket that you would like to use for this connector select 'false' for this option, otherwise a bucket with the name you entered in 'Bucket Name' will be created from this stack. \n * **Region**: This is the region of the AWS resources based on Carbon Black's mapping - for more information please see [Carbon Black documentation](https://developer.carbonblack.com/reference/carbon-black-cloud/integrations/data-forwarder/quick-setup/#create-a-bucket).\n * **SQSQueuePrefix**: The stack creates multiple queues, this prefix will be added to each one of them. 
\n * **WorkspaceID**: Use the Workspace ID provided below.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Once the deployment is complete - head to the 'Outputs' tab, you will see: Role ARN, S3 bucket and 4 SQS resources created. You will need those resources in the next step when configuring Carbon Black's data forwarders and the data connector.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Carbon Black data forwarder configuration \n After all AWS resources have been created you'll need to configure Carbon Black to forward the events to the AWS buckets for Microsoft Sentinel to ingest them. Follow [Carbon Black's documentation on how to create a 'Data Forwarders'](https://developer.carbonblack.com/reference/carbon-black-cloud/integrations/data-forwarder/quick-setup/#2-create-a-forwarder) Use the first recommended option. When asked to input a bucket name use the bucket created in the previous step. \n You will be required to add 'S3 prefix' for each forwarder, please use this mapping:""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Alert | carbon-black-cloud-forwarder/Alerts |\n | Auth Events | carbon-black-cloud-forwarder/Auth |\n | Endpoint Events | carbon-black-cloud-forwarder/Endpoint |\n | Watchlist Hit | carbon-black-cloud-forwarder/Watchlist |""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2.1. Test your data forwarder (Optional) \n To validate the data forwarder is configured as expected, in Carbon Black's portal search for the data forwarder that you just created and click on 'Test Forwarder' button under the 'Actions' column, this will generate a 'HealthCheck' file in the S3 Bucket, you should see it appear immediately.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the 'Add new collector' button, fill the required information, the ARN role and the SQS URL are created in step 1, note that you will need to enter the correct SQS URL and select the appropriate event type from the dropdown, for example if you want to ingest Alert events you will need to copy the Alerts SQS URL and select the 'Alerts' event type in the dropdown""}}]}, {""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CarbonBlackAlertsStream"", ""text"": ""Alerts""}, {""key"": ""Custom-CarbonBlackAuthStream"", ""text"": ""Auth Events""}, {""key"": ""Custom-CarbonBlackEndpointStream"", ""text"": ""Endpoint Events""}, {""key"": ""Custom-CarbonBlackWatchlistStream"", ""text"": ""Watchlist""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": 
""write permission."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Environment"", ""description"": ""You must have the following AWS resources defined and configured: S3, Simple Queue Service (SQS), IAM roles and permissions policies""}, {""name"": ""Environment"", ""description"": ""You must have the a Carbon black account and required permissions to create a Data Forwarded to AWS S3 buckets. \nFor more details visit [Carbon Black Data Forwarder Docs](https://docs.vmware.com/en/VMware-Carbon-Black-Cloud/services/carbon-black-cloud-user-guide/GUID-E8D33F72-BABB-4157-A908-D8BBDB5AF349.html)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/CarbonBlackViaAWSS3_ConnectorDefinition.json;https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/VMwareCarbonBlackCloud_ccp/CarbonBlack_DataConnectorDefination.json","false" +"CarbonBlack_Watchlist_CL","VMware Carbon Black Cloud","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud","azuresentinel","azure-sentinel-solution-vmwarecarbonblack","2022-06-01","","","Microsoft","Microsoft","https://support.microsoft.com/","","domains","carbonBlackAWSS3","Microsoft","VMware Carbon Black Cloud via AWS S3","The [VMware Carbon Black Cloud](https://www.vmware.com/products/carbon-black-cloud.html) via AWS S3 data connector provides the capability to ingest watchlist, alerts, auth and endpoints events via AWS S3 and stream them to ASIM normalized tables. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### 1. 
AWS CloudFormation Deployment \n To configure access on AWS, two templates have been generated to set up the AWS environment to send logs from S3 bucket to your Log Analytics Workspace.\n #### For each template, create Stack in AWS: \n 1. Go to [AWS CloudFormation Stacks](https://aka.ms/awsCloudFormationLink#/stacks/create) \n 2. In AWS, choose the 'Upload a template file' option and click on 'Choose file'. Select the downloaded template \n 3. Click 'Next' and 'Create stack'""}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 1: OpenID connect authentication deployment"", ""isMultiLine"": true, ""fillWith"": [""Oidc""]}}, {""type"": ""CopyableLabel"", ""parameters"": {""label"": ""Template 2: AWS Carbon Black resources deployment"", ""isMultiLine"": true, ""fillWith"": [""CarbonBlack""]}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""When deploying 'Template 2: AWS Carbon Black resources deployment' template you'll need to supply a few parameters \n * **Stack Name**: A stack name of your choosing (will appear in the list of stacks in AWS)\n * **Role Name**: Must begin with 'OIDC_' prefix, has a default value. \n * **Bucket Name**: Bucket name of your choosing, if you already have an existing bucket paste the name here \n * **CreateNewBucket**: If you already have an existing bucket that you would like to use for this connector select 'false' for this option, otherwise a bucket with the name you entered in 'Bucket Name' will be created from this stack. \n * **Region**: This is the region of the AWS resources based on Carbon Black's mapping - for more information please see [Carbon Black documentation](https://developer.carbonblack.com/reference/carbon-black-cloud/integrations/data-forwarder/quick-setup/#create-a-bucket).\n * **SQSQueuePrefix**: The stack creates multiple queues, this prefix will be added to each one of them. 
\n * **WorkspaceID**: Use the Workspace ID provided below.""}}, {""type"": ""CopyableLabel"", ""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""Once the deployment is complete - head to the 'Outputs' tab, you will see: Role ARN, S3 bucket and 4 SQS resources created. You will need those resources in the next step when configuring Carbon Black's data forwarders and the data connector.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2. Carbon Black data forwarder configuration \n After all AWS resources have been created you'll need to configure Carbon Black to forward the events to the AWS buckets for Microsoft Sentinel to ingest them. Follow [Carbon Black's documentation on how to create a 'Data Forwarders'](https://developer.carbonblack.com/reference/carbon-black-cloud/integrations/data-forwarder/quick-setup/#2-create-a-forwarder) Use the first recommended option. When asked to input a bucket name use the bucket created in the previous step. \n You will be required to add 'S3 prefix' for each forwarder, please use this mapping:""}}, {""type"": ""Markdown"", ""parameters"": {""content"": "" | Event type | S3 prefix | \n |-----------------|-----------|\n | Alert | carbon-black-cloud-forwarder/Alerts |\n | Auth Events | carbon-black-cloud-forwarder/Auth |\n | Endpoint Events | carbon-black-cloud-forwarder/Endpoint |\n | Watchlist Hit | carbon-black-cloud-forwarder/Watchlist |""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 2.1. Test your data forwarder (Optional) \n To validate the data forwarder is configured as expected, in Carbon Black's portal search for the data forwarder that you just created and click on 'Test Forwarder' button under the 'Actions' column, this will generate a 'HealthCheck' file in the S3 Bucket, you should see it appear immediately.""}}, {""type"": ""Markdown"", ""parameters"": {""content"": ""#### 3. 
Connect new collectors \n To enable AWS S3 for Microsoft Sentinel, click the 'Add new collector' button, fill the required information, the ARN role and the SQS URL are created in step 1, note that you will need to enter the correct SQS URL and select the appropriate event type from the dropdown, for example if you want to ingest Alert events you will need to copy the Alerts SQS URL and select the 'Alerts' event type in the dropdown""}}]}, {""instructions"": [{""type"": ""DataConnectorsGrid"", ""parameters"": {""mapping"": [{""columnValue"": ""properties.roleArn"", ""columnName"": ""Role ARN""}, {""columnValue"": ""properties.sqsUrls[0]"", ""columnName"": ""Queue URL""}, {""columnValue"": ""properties.dcrConfig.streamName"", ""columnName"": ""Stream name""}], ""menuItems"": [""DeleteConnector""]}}, {""type"": ""ContextPane"", ""parameters"": {""contextPaneType"": ""DataConnectorsContextPane"", ""title"": ""Add new controller"", ""subtitle"": ""AWS S3 connector"", ""label"": ""Add new collector"", ""instructionSteps"": [{""title"": ""Account details"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Role ARN"", ""type"": ""text"", ""name"": ""roleArn"", ""validations"": {""required"": true}}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Queue URL"", ""type"": ""text"", ""name"": ""queueUrl"", ""validations"": {""required"": true}}}, {""type"": ""Dropdown"", ""parameters"": {""label"": ""Data type"", ""type"": ""text"", ""name"": ""streamName"", ""required"": true, ""placeholder"": ""Select a data type"", ""options"": [{""key"": ""Custom-CarbonBlackAlertsStream"", ""text"": ""Alerts""}, {""key"": ""Custom-CarbonBlackAuthStream"", ""text"": ""Auth Events""}, {""key"": ""Custom-CarbonBlackEndpointStream"", ""text"": ""Endpoint Events""}, {""key"": ""Custom-CarbonBlackWatchlistStream"", ""text"": ""Watchlist""}]}}]}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": 
""write permission."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}], ""customs"": [{""name"": ""Environment"", ""description"": ""You must have the following AWS resources defined and configured: S3, Simple Queue Service (SQS), IAM roles and permissions policies""}, {""name"": ""Environment"", ""description"": ""You must have the a Carbon black account and required permissions to create a Data Forwarded to AWS S3 buckets. \nFor more details visit [Carbon Black Data Forwarder Docs](https://docs.vmware.com/en/VMware-Carbon-Black-Cloud/services/carbon-black-cloud-user-guide/GUID-E8D33F72-BABB-4157-A908-D8BBDB5AF349.html)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/CarbonBlackViaAWSS3_ConnectorDefinition.json;https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20Carbon%20Black%20Cloud/Data%20Connectors/VMwareCarbonBlackCloud_ccp/CarbonBlack_DataConnectorDefination.json","false" +"VMware_CWS_DLPLogs_CL","VMware SD-WAN and SASE","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20SD-WAN%20and%20SASE","velocloud","azure-sentinel-solution-vmwaresase","2023-12-31","","","VMware by Broadcom","Partner","https://developer.vmware.com/","","domains","VMwareSDWAN","VMware by Broadcom","VMware SD-WAN and SASE Connector","The [VMware SD-WAN & SASE](https://sase.vmware.com) data connector offers the capability to ingest VMware SD-WAN and CWS events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://developer.vmware.com/apis/vmware-sase-platform/) for more information. The connector provides ability to get events which helps to examine potential network security issues, identify misconfigured network devices and monitor SD-WAN and SASE usage. 
If you have your own custom connector, make sure that the connector is deployed under an isolated Log Analytics Workspace first. In case of issues, questions or feature requests, please contact us via email on sase-siem-integration@vmware.com.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the VMware Edge Cloud Orchestrator REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the VECO API**\n\n [Follow the instructions](https://docs.vmware.com/en/VMware-SD-WAN/5.3/VMware-SD-WAN-Administration-Guide/GUID-2FA3763F-835C-4D10-A32B-450FEB5397D8.html) to create and obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function.**""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the VMware SD-WAN and SASE Connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelvmwaresdwan)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**.\n3. Enter or modify the Function App, Log Analytics and Azure Monitor settings, enter the VECO FQDN (without https://, for example vco123-usvi1.velocloud.net), enter the API token created (including \""Token \"" at the beginning of the string), and adjust your desired Function App frequency, then deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the VMware SD-WAN and SASE Connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-vmwaresdwan-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. vmwsase-siemXXXXXXXXXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.10.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab.\n3. Check if the application has these settings defined correctly and adjust if needed: \n\t\tapi_veco_authorization\n\t\tapi_veco_fqdn\n\t\tapp_frequency_mins\n\t\tazsa_share_connectionstring\n\t\tazsa_share_name\n\t\tdce_endpoint\n\t\tdcr_cwsdlplog_immutableid\n\t\tdcr_cwshealth_immutableid\n\t\tdcr_cwsweblog_immutableid\n\t\tdcr_efsfwlog_immutableid\n\t\tdcr_efshealth_immutableid\n\t\tdcr_saseaudit_immutableid\n\t\tstream_cwsdlplog\n\t\tstream_cwshealth\n\t\tstream_cwsweblog\n\t\tstream_efsfwlog\n\t\tstream_efshealth\n\t\tstream_saseaudit\n4. In case you made changes to application settings, make sure that you click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App are required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**api_veco_authorization**, **api_veco_fqdn** is required for REST API. [See the documentation to learn more about VMware SASE APIs](https://developer.vmware.com/apis/vmware-sase-platform/). Check all [requirements and follow the instructions](https://docs.vmware.com/en/VMware-SD-WAN/5.3/VMware-SD-WAN-Administration-Guide/GUID-2FA3763F-835C-4D10-A32B-450FEB5397D8.html) for obtaining credentials. The Function App only supports token-based API authentication. Be advised that the API Token generated will inherit the access rights of the user account under which it was generated.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20SD-WAN%20and%20SASE/Data%20Connectors/Function%20App%20Connector/VMwareSASE_API_FunctionApp.json","true" +"VMware_CWS_Health_CL","VMware SD-WAN and SASE","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20SD-WAN%20and%20SASE","velocloud","azure-sentinel-solution-vmwaresase","2023-12-31","","","VMware by Broadcom","Partner","https://developer.vmware.com/","","domains","VMwareSDWAN","VMware by Broadcom","VMware SD-WAN and SASE Connector","The [VMware SD-WAN & SASE](https://sase.vmware.com) data connector offers the capability to ingest VMware SD-WAN and CWS events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://developer.vmware.com/apis/vmware-sase-platform/) for more information. The connector provides ability to get events which helps to examine potential network security issues, identify misconfigured network devices and monitor SD-WAN and SASE usage. If you have your own custom connector, make sure that the connector is deployed under an isolated Log Analytics Workspace first. 
In case of issues, questions or feature requests, please contact us via email on sase-siem-integration@vmware.com.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the VMware Edge Cloud Orchestrator REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the VECO API**\n\n [Follow the instructions](https://docs.vmware.com/en/VMware-SD-WAN/5.3/VMware-SD-WAN-Administration-Guide/GUID-2FA3763F-835C-4D10-A32B-450FEB5397D8.html) to create and obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function.**""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the VMware SD-WAN and SASE Connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelvmwaresdwan)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**.\n3. Enter or modify the Function App, Log Analytics and Azure Monitor settings, enter the VECO FQDN (without https://, for example vco123-usvi1.velocloud.net), enter the API token created (including \""Token \"" at the beginning of the string), and adjust your desired Function App frequency, then deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the VMware SD-WAN and SASE Connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-vmwaresdwan-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. vmwsase-siemXXXXXXXXXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.10.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab .\n3. Check if the application has these settings defined correctly and adjust if needed: \n\t\tapi_veco_authorization\n\t\tapi_veco_fqdn\n\t\tapp_frequency_mins\n\t\tazsa_share_connectionstring\n\t\tazsa_share_name dce_endpoint\n\t\tdcr_cwsdlplog_immutableid\n\t\tdcr_cwshealth_immutableid\n\t\tdcr_cwsweblog_immutableid\n\t\tdcr_efsfwlog_immutableid\n\t\tdcr_efshealth_immutableid\n\t\tdcr_saseaudit_immutableid\n\t\tstream_cwsdlplog\n\t\tstream_cwshealth\n\t\tstream_cwsweblog\n\t\tstream_efsfwlog\n\t\tstream_efshealth\n\t\tstream_saseaudit\n3. In case you made changes to application settings have been entered, make sure that you click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**api_veco_authorization**, **api_veco_fqdn** is required for REST API. [See the documentation to learn more about VMware SASE APIs](https://developer.vmware.com/apis/vmware-sase-platform/). Check all [requirements and follow the instructions](https://docs.vmware.com/en/VMware-SD-WAN/5.3/VMware-SD-WAN-Administration-Guide/GUID-2FA3763F-835C-4D10-A32B-450FEB5397D8.html) for obtaining credentials. The Function App only supports token-based API authentication. Be advised that the API Token generated will inherit the access rights of the user account under which it was generated.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20SD-WAN%20and%20SASE/Data%20Connectors/Function%20App%20Connector/VMwareSASE_API_FunctionApp.json","true" +"VMware_CWS_Weblogs_CL","VMware SD-WAN and SASE","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20SD-WAN%20and%20SASE","velocloud","azure-sentinel-solution-vmwaresase","2023-12-31","","","VMware by Broadcom","Partner","https://developer.vmware.com/","","domains","VMwareSDWAN","VMware by Broadcom","VMware SD-WAN and SASE Connector","The [VMware SD-WAN & SASE](https://sase.vmware.com) data connector offers the capability to ingest VMware SD-WAN and CWS events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://developer.vmware.com/apis/vmware-sase-platform/) for more information. The connector provides ability to get events which helps to examine potential network security issues, identify misconfigured network devices and monitor SD-WAN and SASE usage. If you have your own custom connector, make sure that the connector is deployed under an isolated Log Analytics Workspace first. 
In case of issues, questions or feature requests, please contact us via email on sase-siem-integration@vmware.com.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the VMware Edge Cloud Orchestrator REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the VECO API**\n\n [Follow the instructions](https://docs.vmware.com/en/VMware-SD-WAN/5.3/VMware-SD-WAN-Administration-Guide/GUID-2FA3763F-835C-4D10-A32B-450FEB5397D8.html) to create and obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function.**""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the VMware SD-WAN and SASE Connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelvmwaresdwan)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**.\n3. Enter or modify the Function App, Log Analytics and Azure Monitor settings, enter the VECO FQDN (without https://, for example vco123-usvi1.velocloud.net), enter the API token created (including \""Token \"" at the beginning of the string), and adjust your desired Function App frequency, then deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the VMware SD-WAN and SASE Connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-vmwaresdwan-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. vmwsase-siemXXXXXXXXXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.10.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab .\n3. Check if the application has these settings defined correctly and adjust if needed: \n\t\tapi_veco_authorization\n\t\tapi_veco_fqdn\n\t\tapp_frequency_mins\n\t\tazsa_share_connectionstring\n\t\tazsa_share_name dce_endpoint\n\t\tdcr_cwsdlplog_immutableid\n\t\tdcr_cwshealth_immutableid\n\t\tdcr_cwsweblog_immutableid\n\t\tdcr_efsfwlog_immutableid\n\t\tdcr_efshealth_immutableid\n\t\tdcr_saseaudit_immutableid\n\t\tstream_cwsdlplog\n\t\tstream_cwshealth\n\t\tstream_cwsweblog\n\t\tstream_efsfwlog\n\t\tstream_efshealth\n\t\tstream_saseaudit\n3. In case you made changes to application settings have been entered, make sure that you click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**api_veco_authorization**, **api_veco_fqdn** is required for REST API. [See the documentation to learn more about VMware SASE APIs](https://developer.vmware.com/apis/vmware-sase-platform/). Check all [requirements and follow the instructions](https://docs.vmware.com/en/VMware-SD-WAN/5.3/VMware-SD-WAN-Administration-Guide/GUID-2FA3763F-835C-4D10-A32B-450FEB5397D8.html) for obtaining credentials. The Function App only supports token-based API authentication. Be advised that the API Token generated will inherit the access rights of the user account under which it was generated.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20SD-WAN%20and%20SASE/Data%20Connectors/Function%20App%20Connector/VMwareSASE_API_FunctionApp.json","true" +"VMware_VECO_EventLogs_CL","VMware SD-WAN and SASE","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20SD-WAN%20and%20SASE","velocloud","azure-sentinel-solution-vmwaresase","2023-12-31","","","VMware by Broadcom","Partner","https://developer.vmware.com/","","domains","VMwareSDWAN","VMware by Broadcom","VMware SD-WAN and SASE Connector","The [VMware SD-WAN & SASE](https://sase.vmware.com) data connector offers the capability to ingest VMware SD-WAN and CWS events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://developer.vmware.com/apis/vmware-sase-platform/) for more information. The connector provides ability to get events which helps to examine potential network security issues, identify misconfigured network devices and monitor SD-WAN and SASE usage. If you have your own custom connector, make sure that the connector is deployed under an isolated Log Analytics Workspace first. 
In case of issues, questions or feature requests, please contact us via email on sase-siem-integration@vmware.com.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the VMware Edge Cloud Orchestrator REST API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the VECO API**\n\n [Follow the instructions](https://docs.vmware.com/en/VMware-SD-WAN/5.3/VMware-SD-WAN-Administration-Guide/GUID-2FA3763F-835C-4D10-A32B-450FEB5397D8.html) to create and obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function.**""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the VMware SD-WAN and SASE Connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinelvmwaresdwan)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**.\n3. Enter or modify the Function App, Log Analytics and Azure Monitor settings, enter the VECO FQDN (without https://, for example vco123-usvi1.velocloud.net), enter the API token created (including \""Token \"" at the beginning of the string), and adjust your desired Function App frequency, then deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. 
Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the VMware SD-WAN and SASE Connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-vmwaresdwan-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. vmwsase-siemXXXXXXXXXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.10.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. 
A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab .\n3. Check if the application has these settings defined correctly and adjust if needed: \n\t\tapi_veco_authorization\n\t\tapi_veco_fqdn\n\t\tapp_frequency_mins\n\t\tazsa_share_connectionstring\n\t\tazsa_share_name dce_endpoint\n\t\tdcr_cwsdlplog_immutableid\n\t\tdcr_cwshealth_immutableid\n\t\tdcr_cwsweblog_immutableid\n\t\tdcr_efsfwlog_immutableid\n\t\tdcr_efshealth_immutableid\n\t\tdcr_saseaudit_immutableid\n\t\tstream_cwsdlplog\n\t\tstream_cwshealth\n\t\tstream_cwsweblog\n\t\tstream_efsfwlog\n\t\tstream_efshealth\n\t\tstream_saseaudit\n3. In case you made changes to application settings have been entered, make sure that you click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**api_veco_authorization**, **api_veco_fqdn** is required for REST API. [See the documentation to learn more about VMware SASE APIs](https://developer.vmware.com/apis/vmware-sase-platform/). Check all [requirements and follow the instructions](https://docs.vmware.com/en/VMware-SD-WAN/5.3/VMware-SD-WAN-Administration-Guide/GUID-2FA3763F-835C-4D10-A32B-450FEB5397D8.html) for obtaining credentials. The Function App only supports token-based API authentication. Be advised that the API Token generated will inherit the access rights of the user account under which it was generated.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20SD-WAN%20and%20SASE/Data%20Connectors/Function%20App%20Connector/VMwareSASE_API_FunctionApp.json","true" +"vcenter_CL","VMware vCenter","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20vCenter","azuresentinel","azure-sentinel-solution-vcenter","2022-06-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","VMwarevCenter","VMware","[Deprecated] VMware vCenter","The [vCenter](https://www.vmware.com/in/products/vcenter-server.html) connector allows you to easily connect your vCenter server logs with Microsoft Sentinel. This gives you more insight into your organization's data centers and improves your security operation capabilities.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. 
To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias VMware vCenter and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20vCenter/Parsers/vCenter.txt), on the second line of the query, enter the hostname(s) of your VMware vCenter device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update. \n> 1. If you have not installed the vCenter solution from ContentHub then [Follow the steps](https://aka.ms/sentinel-vCenter-parser) to use the Kusto function alias, **vCenter**"", ""instructions"": []}, {""title"": ""1. Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Follow the configuration steps below to get vCenter server logs into Microsoft Sentinel. 
Refer to the [Azure Monitor Documentation](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-json) for more details on these steps.\n For vCenter Server logs, we have issues while parsing the data by OMS agent data using default settings. \nSo we advise to capture the logs into custom table **vcenter_CL** using below instructions. \n1. Login to the server where you have installed OMS agent.\n2. Download config file vCenter.conf \n\t\twget -v https://aka.ms/sentinel-vcenteroms-conf -O vcenter.conf \n3. Copy vcenter.conf to the /etc/opt/microsoft/omsagent/**workspace_id**/conf/omsagent.d/ folder. \n\t\tcp vcenter.conf /etc/opt/microsoft/omsagent/<>/conf/omsagent.d/\n4. Edit vcenter.conf as follows:\n\n\t a. vcenter.conf uses the port **22033** by default. Ensure this port is not being used by any other source on your server\n\n\t b. If you would like to change the default port for **vcenter.conf** make sure that you don't use default Azure monitoring /log analytic agent ports I.e.(For example CEF uses TCP port **25226** or **25224**) \n\n\t c. replace **workspace_id** with real value of your Workspace ID (lines 13,14,15,18)\n5. Save changes and restart the Azure Log Analytics agent for Linux service with the following command:\n\t\tsudo /opt/microsoft/omsagent/bin/service_control restart\n6. Modify /etc/rsyslog.conf file - add below template preferably at the beginning / before directives section \n\n\t\t$template vcenter,\""%timestamp% %hostname% %msg%\\ n\"" \n\n **Note - There is no space between slash(\\\\) and character 'n' in above command.**\n\n 7. 
Create a custom conf file in /etc/rsyslog.d/ for example 10-vcenter.conf and add following filter conditions.\n\nDownload config file [10-vCenter.conf](https://aka.ms/sentinel-vcenter-conf)\n\n\t With an added statement you will need to create a filter which will specify the logs coming from the vcenter server to be forwarded to the custom table.\n\n\t reference: [Filter Conditions \u2014 rsyslog 8.18.0.master documentation](https://rsyslog.readthedocs.io/en/latest/configuration/filters.html)\n\n\t Here is an example of filtering that can be defined, this is not complete and will require additional testing for each installation.\n\t\t if $rawmsg contains \""vcenter-server\"" then @@127.0.0.1:22033;vcenter\n\t\t & stop \n\t\t if $rawmsg contains \""vpxd\"" then @@127.0.0.1:22033;vcenter\n\t\t & stop\n\t\t \n8. Restart rsyslog\n\t\t systemctl restart rsyslog"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Configure and connect the vCenter device(s)"", ""description"": ""[Follow these instructions](https://docs.vmware.com/en/VMware-vSphere/7.0/com.vmware.vsphere.monitoring.doc/GUID-9633A961-A5C3-4658-B099-B81E0512DC21.html) to configure the vCenter to forward syslog. Use the IP address or hostname for the Linux device with the Linux agent installed as the Destination IP address.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Include custom pre-requisites if the connectivity requires - else delete customs"", ""description"": ""Description for any custom pre-requisite""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VMware%20vCenter/Data%20Connectors/Connector_Syslog_vcenter.json","true" +"ValenceAlert_CL","Valence Security","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Valence%20Security","valencesecurityinc1673598943514","valence_sentinel_solution","2023-11-20","","","Valence Security","Partner","https://www.valencesecurity.com/","","domains","ValenceSecurity","Valence Security","SaaS Security","Connects the Valence SaaS security platform Azure Log Analytics via the REST API interface.","[{""title"": ""Step 1 : Read the detailed documentation"", ""description"": ""The installation process is documented in great detail in [Valence Security's knowledge base](https://support.valencesecurity.com). 
The user should consult this documentation further to understand installation and debug of the integration.""}, {""title"": ""Step 2: Retrieve the workspace access credentials"", ""description"": ""The first installation step is to retrieve both your **Workspace ID** and **Primary Key** from the Microsoft Sentinel platform.\nCopy the values shown below and save them for configuration of the API log forwarder integration."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Step 3: Configure Sentinel integration on the Valence Security Platform"", ""description"": ""As a Valence Security Platform admin, go to the [configuration screen](https://app.valencesecurity.com/settings/configuration), click Connect in the SIEM Integration card, and choose Microsoft Sentinel. Paste the values from the previous step and click Connect. Valence will test the connection so when success is reported, the connection worked.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Valence%20Security/Data%20Connectors/ValenceSecurity.json","true" +"varonisresources_CL","Varonis Purview","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Varonis%20Purview","varonis","azure-sentinel-solution-varonispurview","2025-10-27","2025-10-01","","Varonis","Partner","https://www.varonis.com/resources/support","","domains","VaronisPurviewPush","Varonis","Varonis Purview Push Connector","The [Varonis Purview](https://www.varonis.com/) connector provides the capability to sync resources from Varonis to Microsoft Purview.","[{""title"": ""1. Run this to set up ingestion for Varonis Resources"", ""description"": ""This will create the necessary Log Analytics tables, Data Collection Rule (DCR), and an Entra application to securely send data to the DCR."", ""instructions"": [{""type"": ""Markdown"", ""parameters"": {""content"": ""#### Automated Configuration and Secure Data Ingestion with Entra Application \nClicking on \""Deploy\"" will trigger the creation of Log Analytics tables and a Data Collection Rule (DCR). \nIt will then create an Entra application, link the DCR to it, and set the entered secret in the application. This setup enables data to be sent securely to the DCR using an Entra token.""}}, {""parameters"": {""label"": ""Deploy Varonis connector resources"", ""applicationDisplayName"": ""Varonis Purview Connector Application""}, ""type"": ""DeployPushConnectorButton""}]}, {""title"": ""2. 
Push your logs into the workspace"", ""description"": ""Use the following parameters to configure the Varonis Purview Connector in your Varonis integrations dashboard."", ""instructions"": [{""parameters"": {""label"": ""Tenant ID (Directory ID)"", ""fillWith"": [""TenantId""]}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Application ID"", ""fillWith"": [""ApplicationId""], ""placeholder"": ""Deploy push connector to get the App Registration Application ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Entra App Registration Secret"", ""fillWith"": [""ApplicationSecret""], ""placeholder"": ""Deploy push connector to get the App Registration Secret""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Endpoint Uri"", ""fillWith"": [""DataCollectionEndpoint""], ""placeholder"": ""Deploy push connector to get the Data Collection Endpoint Uri""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Data Collection Rule Immutable ID"", ""fillWith"": [""DataCollectionRuleId""], ""placeholder"": ""Deploy push connector to get the Data Collection Rule Immutable ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""label"": ""Resources Stream Name"", ""value"": ""Custom-varonisresources""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft Entra"", ""description"": ""Permission to create an app registration in Microsoft Entra ID. Typically requires Entra ID Application Developer role or higher.""}, {""name"": ""Microsoft Azure"", ""description"": ""Permission to assign Monitoring Metrics Publisher role on data collection rule (DCR). 
Typically requires Azure RBAC Owner or User Access Administrator role""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Varonis%20Purview/Data%20Connectors/VaronisPurview_ccp/VaronisPurview_connectorDefinition.json","true" +"VaronisAlerts_CL","VaronisSaaS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VaronisSaaS","varonis","microsoft-sentinel-solution-varonissaas","2023-11-10","2023-11-10","","Varonis","Partner","https://www.varonis.com/resources/support","","domains","VaronisSaaS","Varonis","Varonis SaaS","Varonis SaaS provides the capability to ingest [Varonis Alerts](https://www.varonis.com/products/datalert) into Microsoft Sentinel.

Varonis prioritizes deep data visibility, classification capabilities, and automated remediation for data access. Varonis builds a single prioritized view of risk for your data, so you can proactively and systematically eliminate risk from insider threats and cyberattacks.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Varonis DatAlert service to pull alerts into Microsoft Sentinel. This might result in additional data ingestion costs. See the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**For Azure function and related services installation use:**\n\n [![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVaronisSaaS%2FData%2520Connectors%2Fazuredeploy.json)""}, {""title"": """", ""description"": ""STEP 1 - Obtain the Varonis DatAlert Endpoint API credentials.\n\n To generate the Client ID and API key:\n 1. Launch the Varonis Web Interface.\n 2. Navigate to Configuration -> API Keys. The API Keys page is displayed.\n 3. Click Create API Key. The Add New API Key settings are displayed on the right.\n 4. Fill in the name and description.\n 5. Click the Generate Key button.\n 6. Copy the API key secret and save it in a handy location. 
You won't be able to copy it again.\n\nFor additional information, please check: [Varonis Documentation](https://help.varonis.com/s/document-item?bundleId=ami1661784208197&topicId=emp1703144742927.html&_LANG=enus)""}, {""title"": """", ""description"": ""STEP 2 - Deploy the connector and the associated Azure Function."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": """", ""description"": ""Use this method for automated deployment of the data connector using an ARM Template.\n\n1. Click the Deploy to Azure button. \n\n\t[![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVaronisSaaS%2FData%2520Connectors%2Fazuredeploy.json)\n2. Select the preferred Subscription, Resource Group, Region, Storage Account Type.\n3. Enter Log Analytics Workspace Name, Varonis FQDN, Varonis SaaS API Key.\n4. Click Review + Create, Create.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VaronisSaaS/Data%20Connectors/VaronisSaaS_API_FunctionApp.json","true" +"CommonSecurityLog","Vectra AI Detect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Detect","vectraaiinc","ai_vectra_detect_mss","2022-05-24","2023-04-17","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","AIVectraDetect","Vectra AI","[Deprecated] Vectra AI Detect via Legacy Agent","The AI Vectra Detect connector allows users to connect Vectra Detect logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives users more insight into their organization's network and improves their security operation capabilities.","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 over TCP, UDP or TLS.\n\n> 1. Make sure that you have Python on your machine using the following command: python --version.\n\n> 2. 
You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward AI Vectra Detect logs to Syslog agent in CEF format"", ""description"": ""Configure Vectra (X Series) Agent to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent.\n\nFrom the Vectra UI, navigate to Settings > Notifications and Edit Syslog configuration. Follow below instructions to set up the connection:\n\n- Add a new Destination (which is the host where the Microsoft Sentinel Syslog Agent is running)\n\n- Set the Port as **514**\n\n- Set the Protocol as **UDP**\n\n- Set the format to **CEF**\n\n- Set Log types (Select all log types available)\n\n- Click on **Save**\n\nUser can click the **Test** button to force send some test events.\n\n For more information, refer to Cognito Detect Syslog Guide which can be downloaded from the ressource page in Detect UI.""}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python --version\n\n>2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Detect/Data%20Connectors/AIVectraDetect.json","true" +"CommonSecurityLog","Vectra AI Detect","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Detect","vectraaiinc","ai_vectra_detect_mss","2022-05-24","2023-04-17","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","AIVectraDetectAma","Vectra AI","[Deprecated] Vectra AI Detect via AMA","The AI Vectra Detect connector allows users to connect Vectra Detect logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. This gives users more insight into their organization's network and improves their security operation capabilities.","[{""title"": """", ""description"": """", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. 
Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine""}, {""title"": ""Step B. Forward AI Vectra Detect logs to Syslog agent in CEF format"", ""description"": ""Configure Vectra (X Series) Agent to forward Syslog messages in CEF format to your Microsoft Sentinel workspace via the Syslog agent.\n\nFrom the Vectra UI, navigate to Settings > Notifications and Edit Syslog configuration. Follow below instructions to set up the connection:\n\n- Add a new Destination (which is the host where the Microsoft Sentinel Syslog Agent is running)\n\n- Set the Port as **514**\n\n- Set the Protocol as **UDP**\n\n- Set the format to **CEF**\n\n- Set Log types (Select all log types available)\n\n- Click on **Save**\n\nUser can click the **Test** button to force send some test events.\n\n For more information, refer to Cognito Detect Syslog Guide which can be downloaded from the ressource page in Detect UI.""}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Detect/Data%20Connectors/template_AIVectraDetectAma.json","true" +"VectraStream","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","AIVectraStream","Vectra AI","AI Vectra Stream via Legacy Agent","The AI Vectra Stream connector allows to send Network Metadata collected by Vectra Sensors across the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected **VectraStream** which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Install the Linux agent on a separate Linux instance.\n\n> Logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Follow the configuration steps below to get Vectra Stream metadata into Microsoft Sentinel. The Log Analytics agent is leveraged to send custom JSON into Azure Monitor, enabling the storage of the metadata into a custom table. For more information, refer to the [Azure Monitor Documentation](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-json).\n1. Download config file for the log analytics agent: VectraStream.conf (located in the Connector folder within the Vectra solution: https://aka.ms/sentinel-aivectrastream-conf).\n2. Login to the server where you have installed Azure Log Analytics agent.\n3. Copy VectraStream.conf to the /etc/opt/microsoft/omsagent/**workspace_id**/conf/omsagent.d/ folder.\n4. Edit VectraStream.conf as follows:\n\n\t i. configure an alternate port to send data to, if desired. Default port is 29009.\n\n\t ii. replace **workspace_id** with real value of your Workspace ID.\n5. 
Save changes and restart the Azure Log Analytics agent for Linux service with the following command:\n\t\tsudo /opt/microsoft/omsagent/bin/service_control restart"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Configure and connect Vectra AI Stream"", ""description"": ""Configure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via the Log Analytics Agent.\n\nFrom the Vectra UI, navigate to Settings > Cognito Stream and Edit the destination configuration:\n\n- Select Publisher: RAW JSON\n\n- Set the server IP or hostname (which is the host which run the Log Analytics Agent)\n\n- Set all the port to **29009** (this port can be modified if required)\n\n- Save\n\n- Set Log types (Select all log types available)\n\n- Click on **Save**\n\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Brain"", ""description"": ""must be configured to export Stream metadata in JSON""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/Connector_VectraAI_Stream.json","true" +"VectraStream_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","AIVectraStream","Vectra AI","AI Vectra Stream via Legacy Agent","The AI Vectra Stream connector allows to send Network Metadata collected by Vectra Sensors across the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected **VectraStream** which is deployed with the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Install the Linux agent on a separate Linux instance.\n\n> Logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Follow the configuration steps below to get Vectra Stream metadata into Microsoft Sentinel. The Log Analytics agent is leveraged to send custom JSON into Azure Monitor, enabling the storage of the metadata into a custom table. For more information, refer to the [Azure Monitor Documentation](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-json).\n1. Download config file for the log analytics agent: VectraStream.conf (located in the Connector folder within the Vectra solution: https://aka.ms/sentinel-aivectrastream-conf).\n2. Login to the server where you have installed Azure Log Analytics agent.\n3. Copy VectraStream.conf to the /etc/opt/microsoft/omsagent/**workspace_id**/conf/omsagent.d/ folder.\n4. Edit VectraStream.conf as follows:\n\n\t i. configure an alternate port to send data to, if desired. Default port is 29009.\n\n\t ii. replace **workspace_id** with real value of your Workspace ID.\n5. 
Save changes and restart the Azure Log Analytics agent for Linux service with the following command:\n\t\tsudo /opt/microsoft/omsagent/bin/service_control restart"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}]}, {""title"": ""3. Configure and connect Vectra AI Stream"", ""description"": ""Configure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via the Log Analytics Agent.\n\nFrom the Vectra UI, navigate to Settings > Cognito Stream and Edit the destination configuration:\n\n- Select Publisher: RAW JSON\n\n- Set the server IP or hostname (which is the host which run the Log Analytics Agent)\n\n- Set all the port to **29009** (this port can be modified if required)\n\n- Save\n\n- Set Log types (Select all log types available)\n\n- Click on **Save**\n\n""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Brain"", ""description"": ""must be configured to export Stream metadata in JSON""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/Connector_VectraAI_Stream.json","true" +"vectra_beacon_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors across the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables required for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been installed yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' button on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are going to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to send the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR that you created (Facility is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your workspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host where AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" +"vectra_dcerpc_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been install yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' buton on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are goning to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to sent the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR than you created (Facilily is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your wokrspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host whhere AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" +"vectra_dhcp_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been install yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' buton on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are goning to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to sent the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR than you created (Facilily is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your wokrspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host whhere AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" +"vectra_dns_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been install yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' buton on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are goning to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to sent the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR than you created (Facilily is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your wokrspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host whhere AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" +"vectra_http_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been install yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' buton on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are going to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to send the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR that you created (Facility is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your workspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host where AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" +"vectra_isession_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been install yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' buton on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are going to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to send the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR that you created (Facility is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your workspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host where AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" +"vectra_kerberos_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been install yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' buton on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are going to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to send the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR that you created (Facility is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your workspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host where AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" +"vectra_ldap_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been install yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' buton on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are going to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to send the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR that you created (Facility is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your workspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host where AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" +"vectra_ntlm_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n3. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytics Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been installed yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' button on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check if there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are going to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to send the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR that you created (Facility is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your workspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host where AMA is running)\n 3. Set all the ports to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" +"vectra_radius_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been install yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' buton on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are goning to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to sent the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR than you created (Facilily is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your wokrspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host whhere AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" +"vectra_rdp_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been install yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' buton on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are goning to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to sent the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR than you created (Facilily is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your wokrspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host whhere AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" +"vectra_smbfiles_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n4. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been install yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' buton on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check If there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are goning to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to sent the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR than you created (Facilily is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your wokrspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host whhere AMA is running)\n 3. Set all the port to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" +"vectra_smbmapping_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n3. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytics Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been installed yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' button on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check if there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are going to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to send the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR that you created (Facility is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your workspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host where AMA is running)\n 3. Set all the ports to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" +"vectra_smtp_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n3. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytics Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been installed yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' button on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check if there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are going to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to send the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR that you created (Facility is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your workspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host where AMA is running)\n 3. Set all the ports to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" +"vectra_ssh_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n3. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytics Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been installed yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' button on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check if there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are going to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to send the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR that you created (Facility is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your workspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host where AMA is running)\n 3. Set all the ports to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" +"vectra_ssl_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors accross the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables requires for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n3. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytics Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been installed yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' button on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check if there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are going to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to send the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR that you created (Facility is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your workspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host where AMA is running)\n 3. Set all the ports to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" +"vectra_x509_CL","Vectra AI Stream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream","vectraaiinc","vectra_sentinel_solution","2021-10-18","2024-05-02","","Vectra AI","Partner","https://www.vectra.ai/support","","domains","VectraStreamAma","Vectra AI","[Recommended] Vectra AI Stream via AMA","The Vectra AI Stream connector allows to send Network Metadata collected by Vectra Sensors across the Network and Cloud to Microsoft Sentinel","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on parsers based on a Kusto Function to work as expected which are deployed as part of the Microsoft Sentinel Solution."", ""instructions"": []}, {""title"": """", ""description"": "">**IMPORTANT:** Vectra AI Stream connector is only available for **Linux** agents with **syslog-ng**. Make sure that syslog-ng is installed!\n\n In the first part, we are going to create the custom tables required for this solution (using an ARM template). Then we are going to configure the Data Connector."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Create custom tables in Log Analytic Workspace (ARM Template)"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fvectranetworks%2FMicrosoft_Sentinel%2Fmain%2FStream%2FAMA%2FARM_Templates%2Fazuredeploy_CustomTables_connector.json)\n2. 
Provide the required details such as the resource group and Microsoft Log Analytics Workspace (**the workspace must exist!**)\n3. Click **Review + Create** to deploy.\n\n\t_Note: Once deployed, you must be able to see the custom tables in your Log Analytic Workspace (Settings ---> Tables)._"", ""instructions"": []}, {""title"": ""Step 2. Install the Syslog via AMA Data connector"", ""description"": ""_Note: This is only required if it has not been installed yet in Microsoft Sentinel._\n1. Microsoft Sentinel workspace ---> Content Management ---> Content Hub.\n\n2. Search for 'Syslog' (Provider is Microsoft) and select it.\n\n3. Check 'Install' button on the bottom of the right panel."", ""instructions"": []}, {""title"": ""Step 3. Configure the Syslog via AMA data connector"", ""description"": ""_Note: Two different Data Collection Rules (DCR) are going to be created during this step_\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector.\n\n2. Search for 'Syslog via AMA' data connector and open it.\n\n3. Check if there is no existing DCR configured to collect required facility of logs: LOG_USER/LOG_NOTICE and LOG_LOCAL0/LOG_NOTICE.\n\n4. Create a first DCR (Data Collection Rule). Specify a name. Then, in the Resources tab, select the instance where AMA is going to run. In the Collect tab, select LOG_USER/LOG_NOTICE.\n\n5. Create a second DCR. Specify a different name. Then, in the Resources tab, choose the same host. 
In the Collect tab, select LOG_LOCAL0/LOG_NOTICE\n\n\n\n\tNote:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": """", ""description"": ""In the next section, we are going to modify the syslog-ng configuration that has been created where the AMA is deployed. Then, we are going to modify the DCR configuration to be able to send the network metadata from Vectra Stream to different custom tables."", ""instructions"": [{""parameters"": {""title"": ""Please proceed with these steps:"", ""instructionSteps"": [{""title"": ""Step 1. Modify the syslog-ng configuration"", ""description"": ""_Note: A DCR cannot have more than 10 output flows. As we have 16 custom tables in this solution, we need to split the traffic to two DCR using syslog-ng._\n1. Download the modified syslog-ng configuration file: [azuremonitoragent-tcp.conf](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/syslog-ng/azuremonitoragent-tcp.conf).\n2. Log into the instance where syslog-ng/AMA is running.\n3. Browse to /etc/syslog-ng/conf.d/ and replace the content of _azuremonitoragent-tcp.conf_ file with the one that you just downloaded.\n4. Save and restart syslog-ng (_systemctl restart syslog-ng_)."", ""instructions"": []}, {""title"": ""Step 2. Modify the Data Collection rules configuration"", ""description"": ""_Note: The Data Collection Rules that have been created are located in Azure Monitor (**Monitor ---> Settings ---> Data Collection Rules**)_\n 1. Locate the 2 DCR that you created in Microsoft Sentinel.\n 2. Open the first DCR where Syslog facility is LOG_USER. Then go to Automation ---> Export template ---> Deploy --> Edit template.\n 3. 
Download the dataFlows configuration for LOG_USER DCR: [Stream_DataFlows_dcr1.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr1.json) and find/replace the destination placeholder '' with your workspace name.\n 4. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 5. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace (same as step 3).\n 6. Save --> Review + Create --> Create.\n 7. Open the second DCR that you created (Facility is LOG_LOCAL0) and edit the template (Automation ---> Export template ---> Deploy --> Edit template).\n 8. Download the dataFlows configuration for LOG_LOCAL0 DCR: [Stream_DataFlows_dcr2.json](https://raw.githubusercontent.com/vectranetworks/Microsoft_Sentinel/main/Stream/AMA/dcr/Stream_DataFlows_dcr2.json) and find/replace the destination placeholder '' with your workspace name.\n 9. Locate the dataFlows section in the template (Azure Monitor) and replace it with the content of the configuration you downloaded.\n 10. In the same DCR, locate the key: resources -> properties -> destinations -> name and replace 'DataCollectionEvent' with the name of the Log Analytics Workspace.\n 11. Save --> Review + Create --> Create."", ""instructions"": []}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""Configure Vectra AI Stream"", ""description"": ""\nConfigure Vectra AI Brain to forward Stream metadata in JSON format to your Microsoft Sentinel workspace via AMA.\n\nFrom the Vectra UI, navigate to Settings > Stream and Edit the destination configuration:\n\n 1. Select Publisher: RAW JSON\n 2. Set the server IP or hostname (which is the host where AMA is running)\n 3. Set all the ports to **514**.\n 4. 
Save.""}, {""title"": ""Run the following command to validate (or set up) that syslog-ng is listening on port 514"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": """", ""value"": ""sudo wget -O Forwarder_AMA_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Forwarder_AMA_installer.py&&sudo python Forwarder_AMA_installer.py""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Vectra AI Stream configuration"", ""description"": ""must be configured to export Stream metadata in JSON""}, {""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20AI%20Stream/Data%20Connectors/template_VectraStreamAma.json","true" +"Audits_Data_CL","Vectra XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR","vectraaiinc","vectra-xdr-for-microsoft-sentinel","2023-07-04","2024-08-01","","Vectra Support","Partner","https://www.vectra.ai/support","","domains","VectraXDR","Vectra","Vectra XDR","The [Vectra XDR](https://www.vectra.ai/) connector gives the capability to ingest Vectra Detections, Audits, Entity Scoring, Lockdown, Health and Entities data into Microsoft Sentinel through the Vectra REST API. Refer to the API documentation: `https://support.vectra.ai/s/article/KB-VS-1666` for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Vectra API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. 
Follow these steps for [Detections Parser](https://aka.ms/sentinel-VectraDetections-parser), [Audits Parser](https://aka.ms/sentinel-VectraAudits-parser), [Entity Scoring Parser](https://aka.ms/sentinel-VectraEntityScoring-parser), [Lockdown Parser](https://aka.ms/sentinel-VectraLockdown-parser) and [Health Parser](https://aka.ms/sentinel-VectraHealth-parser) to create the Kusto functions alias, **VectraDetections**, **VectraAudits**, **VectraEntityScoring**, **VectraLockdown** and **VectraHealth**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Vectra API Credentials**\n\n Follow these instructions to create a Vectra Client ID and Client Secret.\n 1. Log into your Vectra portal\n 2. Navigate to Manage -> API Clients\n 3. From the API Clients page, select 'Add API Client' to create a new client.\n 4. Add Client Name, select Role and click on Generate Credentials to obtain your client credentials. \n 5. Be sure to record your Client ID and Secret Key for safekeeping. You will need these two pieces of information to obtain an access token from the Vectra API. An access token is required to make requests to all of the Vectra API endpoints.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. 
The client ID and Tenant ID is required as configuration parameters for the execution of Vectra Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Vectra Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Vectra Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Create a Keyvault**\n\n Follow these instructions to create a new Keyvault.\n 1. In the Azure portal, Go to **Key vaults** and click on Create.\n 2. Select Subscription, Resource Group and provide unique name of keyvault.""}, {""title"": """", ""description"": ""**STEP 7 - Create Access Policy in Keyvault**\n\n Follow these instructions to create access policy in Keyvault.\n 1. Go to keyvaults, select your keyvault, go to Access policies on left side panel, click on create.\n 2. Select all keys & secrets permissions. Click next.\n 3. In the principal section, search by application name which was generated in STEP - 2. 
Click next.\n\n **Note: **Ensure the Permission model in the Access Configuration of Key Vault is set to **'Vault access policy'**""}, {""title"": """", ""description"": ""**STEP 8 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Vectra data connector, have the Vectra API Authorization Credentials readily available..""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Vectra connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace Name \n\t\tVectra Base URL (https://) \n\t\tVectra Client Id - Health \n\t\tVectra Client Secret Key - Health \n\t\tVectra Client Id - Entity Scoring \n\t\tVectra Client Secret - Entity Scoring \n\t\tVectra Client Id - Detections \n\t\tVectra Client Secret - Detections \n\t\tVectra Client Id - Audits \n\t\tVectra Client Secret - Audits \n\t\tVectra Client Id - Lockdown \n\t\tVectra Client Secret - Lockdown \n\t\tVectra Client Id - Host-Entity \n\t\tVectra Client Secret - Host-Entity \n\t\tVectra Client Id - Account-Entity \n\t\tVectra Client Secret - Account-Entity \n\t\tKey Vault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tAzure Entra ObjectID \n\t\tStartTime (in MM/DD/YYYY HH:MM:SS Format) \n\t\tInclude Score Decrease \n\t\tAudits Table Name \n\t\tDetections Table Name \n\t\tEntity Scoring Table Name \n\t\tLockdown Table Name \n\t\tHealth Table Name \n\t\tEntities Table Name \n\t\tExclude Group Details From Detections\n\t\tLog Level 
(Default: INFO) \n\t\tLockdown Schedule \n\t\tHealth Schedule \n\t\tDetections Schedule \n\t\tAudits Schedule \n\t\tEntity Scoring Schedule \n\t\tEntities Schedule \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Vectra data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-VectraXDR320-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. VECTRAXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. 
Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tVectra Base URL (https://) \n\t\tVectra Client Id - Health \n\t\tVectra Client Secret Key - Health \n\t\tVectra Client Id - Entity Scoring \n\t\tVectra Client Secret - Entity Scoring \n\t\tVectra Client Id - Detections \n\t\tVectra Client Secret - Detections \n\t\tVectra Client Id - Audits \n\t\tVectra Client Secret - Audits \n\t\tVectra Client Id - Lockdown \n\t\tVectra Client Secret - Lockdown \n\t\tVectra Client Id - Host-Entity \n\t\tVectra Client Secret - Host-Entity \n\t\tVectra Client Id - Account-Entity \n\t\tVectra Client Secret - Account-Entity \n\t\tKey Vault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tStartTime (in MM/DD/YYYY HH:MM:SS Format) \n\t\tInclude Score Decrease \n\t\tAudits Table Name \n\t\tDetections Table Name \n\t\tEntity Scoring Table Name \n\t\tLockdown Table Name \n\t\tHealth Table Name \n\t\tEntities Table Name \n\t\tLog Level (Default: INFO) \n\t\tLockdown Schedule \n\t\tHealth Schedule \n\t\tDetections Schedule \n\t\tAudits Schedule \n\t\tEntity Scoring Schedule \n\t\tEntities Schedule \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. 
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Vectra Client ID** and **Client Secret** is required for Health, Entity Scoring, Entities, Detections, Lockdown and Audit data collection. 
See the documentation to learn more about API on the `https://support.vectra.ai/s/article/KB-VS-1666`.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR/Data%20Connectors/VectraDataConnector/VectraXDR_API_FunctionApp.json","true" +"Detections_Data_CL","Vectra XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR","vectraaiinc","vectra-xdr-for-microsoft-sentinel","2023-07-04","2024-08-01","","Vectra Support","Partner","https://www.vectra.ai/support","","domains","VectraXDR","Vectra","Vectra XDR","The [Vectra XDR](https://www.vectra.ai/) connector gives the capability to ingest Vectra Detections, Audits, Entity Scoring, Lockdown, Health and Entities data into Microsoft Sentinel through the Vectra REST API. Refer to the API documentation: `https://support.vectra.ai/s/article/KB-VS-1666` for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Vectra API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. 
Follow these steps for [Detections Parser](https://aka.ms/sentinel-VectraDetections-parser), [Audits Parser](https://aka.ms/sentinel-VectraAudits-parser), [Entity Scoring Parser](https://aka.ms/sentinel-VectraEntityScoring-parser), [Lockdown Parser](https://aka.ms/sentinel-VectraLockdown-parser) and [Health Parser](https://aka.ms/sentinel-VectraHealth-parser) to create the Kusto functions alias, **VectraDetections**, **VectraAudits**, **VectraEntityScoring**, **VectraLockdown** and **VectraHealth**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Vectra API Credentials**\n\n Follow these instructions to create a Vectra Client ID and Client Secret.\n 1. Log into your Vectra portal\n 2. Navigate to Manage -> API Clients\n 3. From the API Clients page, select 'Add API Client' to create a new client.\n 4. Add Client Name, select Role and click on Generate Credentials to obtain your client credentials. \n 5. Be sure to record your Client ID and Secret Key for safekeeping. You will need these two pieces of information to obtain an access token from the Vectra API. An access token is required to make requests to all of the Vectra API endpoints.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. 
The client ID and Tenant ID is required as configuration parameters for the execution of Vectra Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Vectra Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Vectra Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Create a Keyvault**\n\n Follow these instructions to create a new Keyvault.\n 1. In the Azure portal, Go to **Key vaults** and click on Create.\n 2. Select Subscription, Resource Group and provide unique name of keyvault.""}, {""title"": """", ""description"": ""**STEP 7 - Create Access Policy in Keyvault**\n\n Follow these instructions to create access policy in Keyvault.\n 1. Go to keyvaults, select your keyvault, go to Access policies on left side panel, click on create.\n 2. Select all keys & secrets permissions. Click next.\n 3. In the principal section, search by application name which was generated in STEP - 2. 
Click next.\n\n **Note: **Ensure the Permission model in the Access Configuration of Key Vault is set to **'Vault access policy'**""}, {""title"": """", ""description"": ""**STEP 8 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Vectra data connector, have the Vectra API Authorization Credentials readily available..""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Vectra connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace Name \n\t\tVectra Base URL (https://) \n\t\tVectra Client Id - Health \n\t\tVectra Client Secret Key - Health \n\t\tVectra Client Id - Entity Scoring \n\t\tVectra Client Secret - Entity Scoring \n\t\tVectra Client Id - Detections \n\t\tVectra Client Secret - Detections \n\t\tVectra Client Id - Audits \n\t\tVectra Client Secret - Audits \n\t\tVectra Client Id - Lockdown \n\t\tVectra Client Secret - Lockdown \n\t\tVectra Client Id - Host-Entity \n\t\tVectra Client Secret - Host-Entity \n\t\tVectra Client Id - Account-Entity \n\t\tVectra Client Secret - Account-Entity \n\t\tKey Vault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tAzure Entra ObjectID \n\t\tStartTime (in MM/DD/YYYY HH:MM:SS Format) \n\t\tInclude Score Decrease \n\t\tAudits Table Name \n\t\tDetections Table Name \n\t\tEntity Scoring Table Name \n\t\tLockdown Table Name \n\t\tHealth Table Name \n\t\tEntities Table Name \n\t\tExclude Group Details From Detections\n\t\tLog Level 
(Default: INFO) \n\t\tLockdown Schedule \n\t\tHealth Schedule \n\t\tDetections Schedule \n\t\tAudits Schedule \n\t\tEntity Scoring Schedule \n\t\tEntities Schedule \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Vectra data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-VectraXDR320-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. VECTRAXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. 
Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tVectra Base URL (https://) \n\t\tVectra Client Id - Health \n\t\tVectra Client Secret Key - Health \n\t\tVectra Client Id - Entity Scoring \n\t\tVectra Client Secret - Entity Scoring \n\t\tVectra Client Id - Detections \n\t\tVectra Client Secret - Detections \n\t\tVectra Client Id - Audits \n\t\tVectra Client Secret - Audits \n\t\tVectra Client Id - Lockdown \n\t\tVectra Client Secret - Lockdown \n\t\tVectra Client Id - Host-Entity \n\t\tVectra Client Secret - Host-Entity \n\t\tVectra Client Id - Account-Entity \n\t\tVectra Client Secret - Account-Entity \n\t\tKey Vault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tStartTime (in MM/DD/YYYY HH:MM:SS Format) \n\t\tInclude Score Decrease \n\t\tAudits Table Name \n\t\tDetections Table Name \n\t\tEntity Scoring Table Name \n\t\tLockdown Table Name \n\t\tHealth Table Name \n\t\tEntities Table Name \n\t\tLog Level (Default: INFO) \n\t\tLockdown Schedule \n\t\tHealth Schedule \n\t\tDetections Schedule \n\t\tAudits Schedule \n\t\tEntity Scoring Schedule \n\t\tEntities Schedule \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. 
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Vectra Client ID** and **Client Secret** is required for Health, Entity Scoring, Entities, Detections, Lockdown and Audit data collection. 
See the documentation to learn more about API on the `https://support.vectra.ai/s/article/KB-VS-1666`.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR/Data%20Connectors/VectraDataConnector/VectraXDR_API_FunctionApp.json","true" +"Entities_Data_CL","Vectra XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR","vectraaiinc","vectra-xdr-for-microsoft-sentinel","2023-07-04","2024-08-01","","Vectra Support","Partner","https://www.vectra.ai/support","","domains","VectraXDR","Vectra","Vectra XDR","The [Vectra XDR](https://www.vectra.ai/) connector gives the capability to ingest Vectra Detections, Audits, Entity Scoring, Lockdown, Health and Entities data into Microsoft Sentinel through the Vectra REST API. Refer to the API documentation: `https://support.vectra.ai/s/article/KB-VS-1666` for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Vectra API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. 
Follow these steps for [Detections Parser](https://aka.ms/sentinel-VectraDetections-parser), [Audits Parser](https://aka.ms/sentinel-VectraAudits-parser), [Entity Scoring Parser](https://aka.ms/sentinel-VectraEntityScoring-parser), [Lockdown Parser](https://aka.ms/sentinel-VectraLockdown-parser) and [Health Parser](https://aka.ms/sentinel-VectraHealth-parser) to create the Kusto functions alias, **VectraDetections**, **VectraAudits**, **VectraEntityScoring**, **VectraLockdown** and **VectraHealth**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Vectra API Credentials**\n\n Follow these instructions to create a Vectra Client ID and Client Secret.\n 1. Log into your Vectra portal\n 2. Navigate to Manage -> API Clients\n 3. From the API Clients page, select 'Add API Client' to create a new client.\n 4. Add Client Name, select Role and click on Generate Credentials to obtain your client credentials. \n 5. Be sure to record your Client ID and Secret Key for safekeeping. You will need these two pieces of information to obtain an access token from the Vectra API. An access token is required to make requests to all of the Vectra API endpoints.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. 
The client ID and Tenant ID is required as configuration parameters for the execution of Vectra Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Vectra Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Vectra Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Create a Keyvault**\n\n Follow these instructions to create a new Keyvault.\n 1. In the Azure portal, Go to **Key vaults** and click on Create.\n 2. Select Subscription, Resource Group and provide unique name of keyvault.""}, {""title"": """", ""description"": ""**STEP 7 - Create Access Policy in Keyvault**\n\n Follow these instructions to create access policy in Keyvault.\n 1. Go to keyvaults, select your keyvault, go to Access policies on left side panel, click on create.\n 2. Select all keys & secrets permissions. Click next.\n 3. In the principal section, search by application name which was generated in STEP - 2. 
Click next.\n\n **Note:** Ensure the Permission model in the Access Configuration of Key Vault is set to **'Vault access policy'**""}, {""title"": """", ""description"": ""**STEP 8 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Vectra data connector, have the Vectra API Authorization Credentials readily available.""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Vectra connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace Name \n\t\tVectra Base URL (https://) \n\t\tVectra Client Id - Health \n\t\tVectra Client Secret Key - Health \n\t\tVectra Client Id - Entity Scoring \n\t\tVectra Client Secret - Entity Scoring \n\t\tVectra Client Id - Detections \n\t\tVectra Client Secret - Detections \n\t\tVectra Client Id - Audits \n\t\tVectra Client Secret - Audits \n\t\tVectra Client Id - Lockdown \n\t\tVectra Client Secret - Lockdown \n\t\tVectra Client Id - Host-Entity \n\t\tVectra Client Secret - Host-Entity \n\t\tVectra Client Id - Account-Entity \n\t\tVectra Client Secret - Account-Entity \n\t\tKey Vault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tAzure Entra ObjectID \n\t\tStartTime (in MM/DD/YYYY HH:MM:SS Format) \n\t\tInclude Score Decrease \n\t\tAudits Table Name \n\t\tDetections Table Name \n\t\tEntity Scoring Table Name \n\t\tLockdown Table Name \n\t\tHealth Table Name \n\t\tEntities Table Name \n\t\tExclude Group Details From Detections\n\t\tLog Level 
(Default: INFO) \n\t\tLockdown Schedule \n\t\tHealth Schedule \n\t\tDetections Schedule \n\t\tAudits Schedule \n\t\tEntity Scoring Schedule \n\t\tEntities Schedule \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Vectra data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-VectraXDR320-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. VECTRAXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. 
Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tVectra Base URL (https://) \n\t\tVectra Client Id - Health \n\t\tVectra Client Secret Key - Health \n\t\tVectra Client Id - Entity Scoring \n\t\tVectra Client Secret - Entity Scoring \n\t\tVectra Client Id - Detections \n\t\tVectra Client Secret - Detections \n\t\tVectra Client Id - Audits \n\t\tVectra Client Secret - Audits \n\t\tVectra Client Id - Lockdown \n\t\tVectra Client Secret - Lockdown \n\t\tVectra Client Id - Host-Entity \n\t\tVectra Client Secret - Host-Entity \n\t\tVectra Client Id - Account-Entity \n\t\tVectra Client Secret - Account-Entity \n\t\tKey Vault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tStartTime (in MM/DD/YYYY HH:MM:SS Format) \n\t\tInclude Score Decrease \n\t\tAudits Table Name \n\t\tDetections Table Name \n\t\tEntity Scoring Table Name \n\t\tLockdown Table Name \n\t\tHealth Table Name \n\t\tEntities Table Name \n\t\tLog Level (Default: INFO) \n\t\tLockdown Schedule \n\t\tHealth Schedule \n\t\tDetections Schedule \n\t\tAudits Schedule \n\t\tEntity Scoring Schedule \n\t\tEntities Schedule \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. 
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Vectra Client ID** and **Client Secret** is required for Health, Entity Scoring, Entities, Detections, Lockdown and Audit data collection. 
See the documentation to learn more about API on the `https://support.vectra.ai/s/article/KB-VS-1666`.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR/Data%20Connectors/VectraDataConnector/VectraXDR_API_FunctionApp.json","true" +"Entity_Scoring_Data_CL","Vectra XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR","vectraaiinc","vectra-xdr-for-microsoft-sentinel","2023-07-04","2024-08-01","","Vectra Support","Partner","https://www.vectra.ai/support","","domains","VectraXDR","Vectra","Vectra XDR","The [Vectra XDR](https://www.vectra.ai/) connector gives the capability to ingest Vectra Detections, Audits, Entity Scoring, Lockdown, Health and Entities data into Microsoft Sentinel through the Vectra REST API. Refer to the API documentation: `https://support.vectra.ai/s/article/KB-VS-1666` for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Vectra API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. 
Follow these steps for [Detections Parser](https://aka.ms/sentinel-VectraDetections-parser), [Audits Parser](https://aka.ms/sentinel-VectraAudits-parser), [Entity Scoring Parser](https://aka.ms/sentinel-VectraEntityScoring-parser), [Lockdown Parser](https://aka.ms/sentinel-VectraLockdown-parser) and [Health Parser](https://aka.ms/sentinel-VectraHealth-parser) to create the Kusto functions alias, **VectraDetections**, **VectraAudits**, **VectraEntityScoring**, **VectraLockdown** and **VectraHealth**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Vectra API Credentials**\n\n Follow these instructions to create a Vectra Client ID and Client Secret.\n 1. Log into your Vectra portal\n 2. Navigate to Manage -> API Clients\n 3. From the API Clients page, select 'Add API Client' to create a new client.\n 4. Add Client Name, select Role and click on Generate Credentials to obtain your client credentials. \n 5. Be sure to record your Client ID and Secret Key for safekeeping. You will need these two pieces of information to obtain an access token from the Vectra API. An access token is required to make requests to all of the Vectra API endpoints.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. 
The client ID and Tenant ID is required as configuration parameters for the execution of Vectra Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Vectra Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Vectra Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Create a Keyvault**\n\n Follow these instructions to create a new Keyvault.\n 1. In the Azure portal, Go to **Key vaults** and click on Create.\n 2. Select Subscription, Resource Group and provide unique name of keyvault.""}, {""title"": """", ""description"": ""**STEP 7 - Create Access Policy in Keyvault**\n\n Follow these instructions to create access policy in Keyvault.\n 1. Go to keyvaults, select your keyvault, go to Access policies on left side panel, click on create.\n 2. Select all keys & secrets permissions. Click next.\n 3. In the principal section, search by application name which was generated in STEP - 2. 
Click next.\n\n **Note:** Ensure the Permission model in the Access Configuration of Key Vault is set to **'Vault access policy'**""}, {""title"": """", ""description"": ""**STEP 8 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Vectra data connector, have the Vectra API Authorization Credentials readily available.""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Vectra connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace Name \n\t\tVectra Base URL (https://) \n\t\tVectra Client Id - Health \n\t\tVectra Client Secret Key - Health \n\t\tVectra Client Id - Entity Scoring \n\t\tVectra Client Secret - Entity Scoring \n\t\tVectra Client Id - Detections \n\t\tVectra Client Secret - Detections \n\t\tVectra Client Id - Audits \n\t\tVectra Client Secret - Audits \n\t\tVectra Client Id - Lockdown \n\t\tVectra Client Secret - Lockdown \n\t\tVectra Client Id - Host-Entity \n\t\tVectra Client Secret - Host-Entity \n\t\tVectra Client Id - Account-Entity \n\t\tVectra Client Secret - Account-Entity \n\t\tKey Vault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tAzure Entra ObjectID \n\t\tStartTime (in MM/DD/YYYY HH:MM:SS Format) \n\t\tInclude Score Decrease \n\t\tAudits Table Name \n\t\tDetections Table Name \n\t\tEntity Scoring Table Name \n\t\tLockdown Table Name \n\t\tHealth Table Name \n\t\tEntities Table Name \n\t\tExclude Group Details From Detections\n\t\tLog Level 
(Default: INFO) \n\t\tLockdown Schedule \n\t\tHealth Schedule \n\t\tDetections Schedule \n\t\tAudits Schedule \n\t\tEntity Scoring Schedule \n\t\tEntities Schedule \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Vectra data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-VectraXDR320-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. VECTRAXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. 
Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tVectra Base URL (https://) \n\t\tVectra Client Id - Health \n\t\tVectra Client Secret Key - Health \n\t\tVectra Client Id - Entity Scoring \n\t\tVectra Client Secret - Entity Scoring \n\t\tVectra Client Id - Detections \n\t\tVectra Client Secret - Detections \n\t\tVectra Client Id - Audits \n\t\tVectra Client Secret - Audits \n\t\tVectra Client Id - Lockdown \n\t\tVectra Client Secret - Lockdown \n\t\tVectra Client Id - Host-Entity \n\t\tVectra Client Secret - Host-Entity \n\t\tVectra Client Id - Account-Entity \n\t\tVectra Client Secret - Account-Entity \n\t\tKey Vault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tStartTime (in MM/DD/YYYY HH:MM:SS Format) \n\t\tInclude Score Decrease \n\t\tAudits Table Name \n\t\tDetections Table Name \n\t\tEntity Scoring Table Name \n\t\tLockdown Table Name \n\t\tHealth Table Name \n\t\tEntities Table Name \n\t\tLog Level (Default: INFO) \n\t\tLockdown Schedule \n\t\tHealth Schedule \n\t\tDetections Schedule \n\t\tAudits Schedule \n\t\tEntity Scoring Schedule \n\t\tEntities Schedule \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. 
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Vectra Client ID** and **Client Secret** is required for Health, Entity Scoring, Entities, Detections, Lockdown and Audit data collection. 
See the documentation to learn more about API on the `https://support.vectra.ai/s/article/KB-VS-1666`.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR/Data%20Connectors/VectraDataConnector/VectraXDR_API_FunctionApp.json","true" +"Health_Data_CL","Vectra XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR","vectraaiinc","vectra-xdr-for-microsoft-sentinel","2023-07-04","2024-08-01","","Vectra Support","Partner","https://www.vectra.ai/support","","domains","VectraXDR","Vectra","Vectra XDR","The [Vectra XDR](https://www.vectra.ai/) connector gives the capability to ingest Vectra Detections, Audits, Entity Scoring, Lockdown, Health and Entities data into Microsoft Sentinel through the Vectra REST API. Refer to the API documentation: `https://support.vectra.ai/s/article/KB-VS-1666` for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Vectra API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. 
Follow these steps for [Detections Parser](https://aka.ms/sentinel-VectraDetections-parser), [Audits Parser](https://aka.ms/sentinel-VectraAudits-parser), [Entity Scoring Parser](https://aka.ms/sentinel-VectraEntityScoring-parser), [Lockdown Parser](https://aka.ms/sentinel-VectraLockdown-parser) and [Health Parser](https://aka.ms/sentinel-VectraHealth-parser) to create the Kusto functions alias, **VectraDetections**, **VectraAudits**, **VectraEntityScoring**, **VectraLockdown** and **VectraHealth**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Vectra API Credentials**\n\n Follow these instructions to create a Vectra Client ID and Client Secret.\n 1. Log into your Vectra portal\n 2. Navigate to Manage -> API Clients\n 3. From the API Clients page, select 'Add API Client' to create a new client.\n 4. Add Client Name, select Role and click on Generate Credentials to obtain your client credentials. \n 5. Be sure to record your Client ID and Secret Key for safekeeping. You will need these two pieces of information to obtain an access token from the Vectra API. An access token is required to make requests to all of the Vectra API endpoints.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. 
The client ID and Tenant ID is required as configuration parameters for the execution of Vectra Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Vectra Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Vectra Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Create a Keyvault**\n\n Follow these instructions to create a new Keyvault.\n 1. In the Azure portal, Go to **Key vaults** and click on Create.\n 2. Select Subscription, Resource Group and provide unique name of keyvault.""}, {""title"": """", ""description"": ""**STEP 7 - Create Access Policy in Keyvault**\n\n Follow these instructions to create access policy in Keyvault.\n 1. Go to keyvaults, select your keyvault, go to Access policies on left side panel, click on create.\n 2. Select all keys & secrets permissions. Click next.\n 3. In the principal section, search by application name which was generated in STEP - 2. 
Click next.\n\n **Note:** Ensure the Permission model in the Access Configuration of Key Vault is set to **'Vault access policy'**""}, {""title"": """", ""description"": ""**STEP 8 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Vectra data connector, have the Vectra API Authorization Credentials readily available.""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Vectra connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace Name \n\t\tVectra Base URL (https://) \n\t\tVectra Client Id - Health \n\t\tVectra Client Secret Key - Health \n\t\tVectra Client Id - Entity Scoring \n\t\tVectra Client Secret - Entity Scoring \n\t\tVectra Client Id - Detections \n\t\tVectra Client Secret - Detections \n\t\tVectra Client Id - Audits \n\t\tVectra Client Secret - Audits \n\t\tVectra Client Id - Lockdown \n\t\tVectra Client Secret - Lockdown \n\t\tVectra Client Id - Host-Entity \n\t\tVectra Client Secret - Host-Entity \n\t\tVectra Client Id - Account-Entity \n\t\tVectra Client Secret - Account-Entity \n\t\tKey Vault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tAzure Entra ObjectID \n\t\tStartTime (in MM/DD/YYYY HH:MM:SS Format) \n\t\tInclude Score Decrease \n\t\tAudits Table Name \n\t\tDetections Table Name \n\t\tEntity Scoring Table Name \n\t\tLockdown Table Name \n\t\tHealth Table Name \n\t\tEntities Table Name \n\t\tExclude Group Details From Detections\n\t\tLog Level 
(Default: INFO) \n\t\tLockdown Schedule \n\t\tHealth Schedule \n\t\tDetections Schedule \n\t\tAudits Schedule \n\t\tEntity Scoring Schedule \n\t\tEntities Schedule \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Vectra data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-VectraXDR320-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. VECTRAXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. 
Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tVectra Base URL (https://) \n\t\tVectra Client Id - Health \n\t\tVectra Client Secret Key - Health \n\t\tVectra Client Id - Entity Scoring \n\t\tVectra Client Secret - Entity Scoring \n\t\tVectra Client Id - Detections \n\t\tVectra Client Secret - Detections \n\t\tVectra Client Id - Audits \n\t\tVectra Client Secret - Audits \n\t\tVectra Client Id - Lockdown \n\t\tVectra Client Secret - Lockdown \n\t\tVectra Client Id - Host-Entity \n\t\tVectra Client Secret - Host-Entity \n\t\tVectra Client Id - Account-Entity \n\t\tVectra Client Secret - Account-Entity \n\t\tKey Vault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tStartTime (in MM/DD/YYYY HH:MM:SS Format) \n\t\tInclude Score Decrease \n\t\tAudits Table Name \n\t\tDetections Table Name \n\t\tEntity Scoring Table Name \n\t\tLockdown Table Name \n\t\tHealth Table Name \n\t\tEntities Table Name \n\t\tLog Level (Default: INFO) \n\t\tLockdown Schedule \n\t\tHealth Schedule \n\t\tDetections Schedule \n\t\tAudits Schedule \n\t\tEntity Scoring Schedule \n\t\tEntities Schedule \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. 
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Vectra Client ID** and **Client Secret** is required for Health, Entity Scoring, Entities, Detections, Lockdown and Audit data collection. 
See the documentation to learn more about API on the `https://support.vectra.ai/s/article/KB-VS-1666`.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR/Data%20Connectors/VectraDataConnector/VectraXDR_API_FunctionApp.json","true" +"Lockdown_Data_CL","Vectra XDR","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR","vectraaiinc","vectra-xdr-for-microsoft-sentinel","2023-07-04","2024-08-01","","Vectra Support","Partner","https://www.vectra.ai/support","","domains","VectraXDR","Vectra","Vectra XDR","The [Vectra XDR](https://www.vectra.ai/) connector gives the capability to ingest Vectra Detections, Audits, Entity Scoring, Lockdown, Health and Entities data into Microsoft Sentinel through the Vectra REST API. Refer to the API documentation: `https://support.vectra.ai/s/article/KB-VS-1666` for more information.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Vectra API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. 
Follow these steps for [Detections Parser](https://aka.ms/sentinel-VectraDetections-parser), [Audits Parser](https://aka.ms/sentinel-VectraAudits-parser), [Entity Scoring Parser](https://aka.ms/sentinel-VectraEntityScoring-parser), [Lockdown Parser](https://aka.ms/sentinel-VectraLockdown-parser) and [Health Parser](https://aka.ms/sentinel-VectraHealth-parser) to create the Kusto functions alias, **VectraDetections**, **VectraAudits**, **VectraEntityScoring**, **VectraLockdown** and **VectraHealth**.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Vectra API Credentials**\n\n Follow these instructions to create a Vectra Client ID and Client Secret.\n 1. Log into your Vectra portal\n 2. Navigate to Manage -> API Clients\n 3. From the API Clients page, select 'Add API Client' to create a new client.\n 4. Add Client Name, select Role and click on Generate Credentials to obtain your client credentials. \n 5. Be sure to record your Client ID and Secret Key for safekeeping. You will need these two pieces of information to obtain an access token from the Vectra API. An access token is required to make requests to all of the Vectra API endpoints.""}, {""title"": """", ""description"": ""**STEP 2 - App Registration steps for the Application in Microsoft Entra ID**\n\n This integration requires an App registration in the Azure portal. Follow the steps in this section to create a new application in Microsoft Entra ID:\n 1. Sign in to the [Azure portal](https://portal.azure.com/).\n 2. Search for and select **Microsoft Entra ID**.\n 3. Under **Manage**, select **App registrations > New registration**.\n 4. Enter a display **Name** for your application.\n 5. Select **Register** to complete the initial app registration.\n 6. When registration finishes, the Azure portal displays the app registration's Overview pane. You see the **Application (client) ID** and **Tenant ID**. 
The client ID and Tenant ID is required as configuration parameters for the execution of Vectra Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app)""}, {""title"": """", ""description"": ""**STEP 3 - Add a client secret for application in Microsoft Entra ID**\n\n Sometimes called an application password, a client secret is a string value required for the execution of Vectra Data Connector. Follow the steps in this section to create a new Client Secret:\n 1. In the Azure portal, in **App registrations**, select your application.\n 2. Select **Certificates & secrets > Client secrets > New client secret**.\n 3. Add a description for your client secret.\n 4. Select an expiration for the secret or specify a custom lifetime. Limit is 24 months.\n 5. Select **Add**. \n 6. *Record the secret's value for use in your client application code. This secret value is never displayed again after you leave this page.* The secret value is required as configuration parameter for the execution of Vectra Data Connector. \n\n> **Reference link:** [https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-client-secret)""}, {""title"": """", ""description"": ""**STEP 4 - Get Object ID of your application in Microsoft Entra ID**\n\n After creating your app registration, follow the steps in this section to get Object ID:\n 1. Go to **Microsoft Entra ID**.\n 2. Select **Enterprise applications** from the left menu.\n 3. Find your newly created application in the list (you can search by the name you provided).\n 4. Click on the application.\n 5. On the overview page, copy the **Object ID**. 
This is the **AzureEntraObjectId** needed for your ARM template role assignment.\n""}, {""title"": """", ""description"": ""**STEP 5 - Assign role of Contributor to application in Microsoft Entra ID**\n\n Follow the steps in this section to assign the role:\n 1. In the Azure portal, Go to **Resource Group** and select your resource group.\n 2. Go to **Access control (IAM)** from left panel.\n 3. Click on **Add**, and then select **Add role assignment**.\n 4. Select **Contributor** as role and click on next.\n 5. In **Assign access to**, select `User, group, or service principal`.\n 6. Click on **add members** and type **your app name** that you have created and select it.\n 7. Now click on **Review + assign** and then again click on **Review + assign**. \n\n> **Reference link:** [https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal](https://learn.microsoft.com/azure/role-based-access-control/role-assignments-portal)""}, {""title"": """", ""description"": ""**STEP 6 - Create a Keyvault**\n\n Follow these instructions to create a new Keyvault.\n 1. In the Azure portal, Go to **Key vaults** and click on Create.\n 2. Select Subscription, Resource Group and provide unique name of keyvault.""}, {""title"": """", ""description"": ""**STEP 7 - Create Access Policy in Keyvault**\n\n Follow these instructions to create access policy in Keyvault.\n 1. Go to keyvaults, select your keyvault, go to Access policies on left side panel, click on create.\n 2. Select all keys & secrets permissions. Click next.\n 3. In the principal section, search by application name which was generated in STEP - 2. 
Click next.\n\n **Note:** Ensure the Permission model in the Access Configuration of Key Vault is set to **'Vault access policy'**""}, {""title"": """", ""description"": ""**STEP 8 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Vectra data connector, have the Vectra API Authorization Credentials readily available.""}, {""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Vectra connector.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-VectraXDRAPI-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the below information : \n\t\tFunction Name \n\t\tWorkspace Name \n\t\tVectra Base URL (https://) \n\t\tVectra Client Id - Health \n\t\tVectra Client Secret Key - Health \n\t\tVectra Client Id - Entity Scoring \n\t\tVectra Client Secret - Entity Scoring \n\t\tVectra Client Id - Detections \n\t\tVectra Client Secret - Detections \n\t\tVectra Client Id - Audits \n\t\tVectra Client Secret - Audits \n\t\tVectra Client Id - Lockdown \n\t\tVectra Client Secret - Lockdown \n\t\tVectra Client Id - Host-Entity \n\t\tVectra Client Secret - Host-Entity \n\t\tVectra Client Id - Account-Entity \n\t\tVectra Client Secret - Account-Entity \n\t\tKey Vault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tAzure Entra ObjectID \n\t\tStartTime (in MM/DD/YYYY HH:MM:SS Format) \n\t\tInclude Score Decrease \n\t\tAudits Table Name \n\t\tDetections Table Name \n\t\tEntity Scoring Table Name \n\t\tLockdown Table Name \n\t\tHealth Table Name \n\t\tEntities Table Name \n\t\tExclude Group Details From Detections\n\t\tLog Level 
(Default: INFO) \n\t\tLockdown Schedule \n\t\tHealth Schedule \n\t\tDetections Schedule \n\t\tAudits Schedule \n\t\tEntity Scoring Schedule \n\t\tEntities Schedule \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Vectra data connector manually with Azure Functions (Deployment via Visual Studio Code).""}, {""title"": """", ""description"": ""**1. Deploy a Function App**\n\n> **NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-VectraXDR320-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. **Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. VECTRAXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.8 or above.\n\n\tf. 
Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration.""}, {""title"": """", ""description"": ""**2. Configure the Function App**\n\n1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select **+ New application setting**.\n3. Add each of the following application settings individually, with their respective values (case-sensitive): \n\t\tWorkspace ID \n\t\tWorkspace Key \n\t\tVectra Base URL (https://) \n\t\tVectra Client Id - Health \n\t\tVectra Client Secret Key - Health \n\t\tVectra Client Id - Entity Scoring \n\t\tVectra Client Secret - Entity Scoring \n\t\tVectra Client Id - Detections \n\t\tVectra Client Secret - Detections \n\t\tVectra Client Id - Audits \n\t\tVectra Client Secret - Audits \n\t\tVectra Client Id - Lockdown \n\t\tVectra Client Secret - Lockdown \n\t\tVectra Client Id - Host-Entity \n\t\tVectra Client Secret - Host-Entity \n\t\tVectra Client Id - Account-Entity \n\t\tVectra Client Secret - Account-Entity \n\t\tKey Vault Name \n\t\tAzure Client Id \n\t\tAzure Client Secret \n\t\tTenant Id \n\t\tStartTime (in MM/DD/YYYY HH:MM:SS Format) \n\t\tInclude Score Decrease \n\t\tAudits Table Name \n\t\tDetections Table Name \n\t\tEntity Scoring Table Name \n\t\tLockdown Table Name \n\t\tHealth Table Name \n\t\tEntities Table Name \n\t\tLog Level (Default: INFO) \n\t\tLockdown Schedule \n\t\tHealth Schedule \n\t\tDetections Schedule \n\t\tAudits Schedule \n\t\tEntity Scoring Schedule \n\t\tEntities Schedule \n\t\tlogAnalyticsUri (optional) \n - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. 
For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. Once all application settings have been entered, click **Save**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**Vectra Client ID** and **Client Secret** is required for Health, Entity Scoring, Entities, Detections, Lockdown and Audit data collection. 
See the documentation to learn more about API on the `https://support.vectra.ai/s/article/KB-VS-1666`.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Vectra%20XDR/Data%20Connectors/VectraDataConnector/VectraXDR_API_FunctionApp.json","true" +"VeeamAuthorizationEvents_CL","Veeam","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam","veeamsoftware","azure-sentinel-solution-veeamapp","2025-08-26","","3.0.2","Veeam Software","Partner","https://helpcenter.veeam.com/docs/security_plugins_microsoft_sentinel/guide/","","domains","VeeamCustomTablesDataConnector","Veeam","Veeam Data Connector (using Azure Functions)","Veeam Data Connector allows you to ingest Veeam telemetry data from multiple custom tables into Microsoft Sentinel.

The connector supports integration with Veeam Backup & Replication, Veeam ONE and Coveware platforms to provide comprehensive monitoring and security analytics. The data is collected through Azure Functions and stored in custom Log Analytics tables with dedicated Data Collection Rules (DCR) and Data Collection Endpoints (DCE).

**Custom Tables Included:**
- **VeeamMalwareEvents_CL**: Malware detection events from Veeam Backup & Replication
- **VeeamSecurityComplianceAnalyzer_CL**: Security & Compliance Analyzer results collected from Veeam backup infrastructure components
- **VeeamAuthorizationEvents_CL**: Authorization and authentication events
- **VeeamOneTriggeredAlarms_CL**: Triggered alarms from Veeam ONE servers
- **VeeamCovewareFindings_CL**: Security findings from Coveware solution
- **VeeamSessions_CL**: Veeam sessions","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Veeam APIs and pull data into Microsoft Sentinel custom tables. This may result in additional data ingestion costs. See the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Select the deployment option for Veeam Data Connector and associated Azure Functions**\n\n>**IMPORTANT:** Before you deploy Veeam Data Connector, prepare Workspace Name (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Veeam data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVeeam%2FData%2520Connectors%2Fazuredeploy_Veeam_API_FunctionApp.json)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Microsoft Sentinel Workspace Name**. \n4. Click **Review + Create**, **Create**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Veeam Infrastructure Access"", ""description"": ""Access to Veeam Backup & Replication REST API and Veeam ONE monitoring platform is required. This includes proper authentication credentials and network connectivity.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam/Data%20Connectors/Veeam_API_FunctionApp.json","true" +"VeeamCovewareFindings_CL","Veeam","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam","veeamsoftware","azure-sentinel-solution-veeamapp","2025-08-26","","3.0.2","Veeam Software","Partner","https://helpcenter.veeam.com/docs/security_plugins_microsoft_sentinel/guide/","","domains","VeeamCustomTablesDataConnector","Veeam","Veeam Data Connector (using Azure Functions)","Veeam Data Connector allows you to ingest Veeam telemetry data from multiple custom tables into Microsoft Sentinel.

The connector supports integration with Veeam Backup & Replication, Veeam ONE and Coveware platforms to provide comprehensive monitoring and security analytics. The data is collected through Azure Functions and stored in custom Log Analytics tables with dedicated Data Collection Rules (DCR) and Data Collection Endpoints (DCE).

**Custom Tables Included:**
- **VeeamMalwareEvents_CL**: Malware detection events from Veeam Backup & Replication
- **VeeamSecurityComplianceAnalyzer_CL**: Security & Compliance Analyzer results collected from Veeam backup infrastructure components
- **VeeamAuthorizationEvents_CL**: Authorization and authentication events
- **VeeamOneTriggeredAlarms_CL**: Triggered alarms from Veeam ONE servers
- **VeeamCovewareFindings_CL**: Security findings from Coveware solution
- **VeeamSessions_CL**: Veeam sessions","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Veeam APIs and pull data into Microsoft Sentinel custom tables. This may result in additional data ingestion costs. See the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Select the deployment option for Veeam Data Connector and associated Azure Functions**\n\n>**IMPORTANT:** Before you deploy Veeam Data Connector, prepare Workspace Name (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Veeam data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVeeam%2FData%2520Connectors%2Fazuredeploy_Veeam_API_FunctionApp.json)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Microsoft Sentinel Workspace Name**. \n4. Click **Review + Create**, **Create**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Veeam Infrastructure Access"", ""description"": ""Access to Veeam Backup & Replication REST API and Veeam ONE monitoring platform is required. This includes proper authentication credentials and network connectivity.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam/Data%20Connectors/Veeam_API_FunctionApp.json","true" +"VeeamMalwareEvents_CL","Veeam","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam","veeamsoftware","azure-sentinel-solution-veeamapp","2025-08-26","","3.0.2","Veeam Software","Partner","https://helpcenter.veeam.com/docs/security_plugins_microsoft_sentinel/guide/","","domains","VeeamCustomTablesDataConnector","Veeam","Veeam Data Connector (using Azure Functions)","Veeam Data Connector allows you to ingest Veeam telemetry data from multiple custom tables into Microsoft Sentinel.

The connector supports integration with Veeam Backup & Replication, Veeam ONE and Coveware platforms to provide comprehensive monitoring and security analytics. The data is collected through Azure Functions and stored in custom Log Analytics tables with dedicated Data Collection Rules (DCR) and Data Collection Endpoints (DCE).

**Custom Tables Included:**
- **VeeamMalwareEvents_CL**: Malware detection events from Veeam Backup & Replication
- **VeeamSecurityComplianceAnalyzer_CL**: Security & Compliance Analyzer results collected from Veeam backup infrastructure components
- **VeeamAuthorizationEvents_CL**: Authorization and authentication events
- **VeeamOneTriggeredAlarms_CL**: Triggered alarms from Veeam ONE servers
- **VeeamCovewareFindings_CL**: Security findings from Coveware solution
- **VeeamSessions_CL**: Veeam sessions","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Veeam APIs and pull data into Microsoft Sentinel custom tables. This may result in additional data ingestion costs. See the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Select the deployment option for Veeam Data Connector and associated Azure Functions**\n\n>**IMPORTANT:** Before you deploy Veeam Data Connector, prepare Workspace Name (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Veeam data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVeeam%2FData%2520Connectors%2Fazuredeploy_Veeam_API_FunctionApp.json)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Microsoft Sentinel Workspace Name**. \n4. Click **Review + Create**, **Create**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Veeam Infrastructure Access"", ""description"": ""Access to Veeam Backup & Replication REST API and Veeam ONE monitoring platform is required. This includes proper authentication credentials and network connectivity.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam/Data%20Connectors/Veeam_API_FunctionApp.json","true" +"VeeamOneTriggeredAlarms_CL","Veeam","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam","veeamsoftware","azure-sentinel-solution-veeamapp","2025-08-26","","3.0.2","Veeam Software","Partner","https://helpcenter.veeam.com/docs/security_plugins_microsoft_sentinel/guide/","","domains","VeeamCustomTablesDataConnector","Veeam","Veeam Data Connector (using Azure Functions)","Veeam Data Connector allows you to ingest Veeam telemetry data from multiple custom tables into Microsoft Sentinel.

The connector supports integration with Veeam Backup & Replication, Veeam ONE and Coveware platforms to provide comprehensive monitoring and security analytics. The data is collected through Azure Functions and stored in custom Log Analytics tables with dedicated Data Collection Rules (DCR) and Data Collection Endpoints (DCE).

**Custom Tables Included:**
- **VeeamMalwareEvents_CL**: Malware detection events from Veeam Backup & Replication
- **VeeamSecurityComplianceAnalyzer_CL**: Security & Compliance Analyzer results collected from Veeam backup infrastructure components
- **VeeamAuthorizationEvents_CL**: Authorization and authentication events
- **VeeamOneTriggeredAlarms_CL**: Triggered alarms from Veeam ONE servers
- **VeeamCovewareFindings_CL**: Security findings from Coveware solution
- **VeeamSessions_CL**: Veeam sessions","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Veeam APIs and pull data into Microsoft Sentinel custom tables. This may result in additional data ingestion costs. See the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Select the deployment option for Veeam Data Connector and associated Azure Functions**\n\n>**IMPORTANT:** Before you deploy Veeam Data Connector, prepare Workspace Name (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Veeam data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVeeam%2FData%2520Connectors%2Fazuredeploy_Veeam_API_FunctionApp.json)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Microsoft Sentinel Workspace Name**. \n4. Click **Review + Create**, **Create**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Veeam Infrastructure Access"", ""description"": ""Access to Veeam Backup & Replication REST API and Veeam ONE monitoring platform is required. This includes proper authentication credentials and network connectivity.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam/Data%20Connectors/Veeam_API_FunctionApp.json","true" +"VeeamSecurityComplianceAnalyzer_CL","Veeam","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam","veeamsoftware","azure-sentinel-solution-veeamapp","2025-08-26","","3.0.2","Veeam Software","Partner","https://helpcenter.veeam.com/docs/security_plugins_microsoft_sentinel/guide/","","domains","VeeamCustomTablesDataConnector","Veeam","Veeam Data Connector (using Azure Functions)","Veeam Data Connector allows you to ingest Veeam telemetry data from multiple custom tables into Microsoft Sentinel.

The connector supports integration with Veeam Backup & Replication, Veeam ONE and Coveware platforms to provide comprehensive monitoring and security analytics. The data is collected through Azure Functions and stored in custom Log Analytics tables with dedicated Data Collection Rules (DCR) and Data Collection Endpoints (DCE).

**Custom Tables Included:**
- **VeeamMalwareEvents_CL**: Malware detection events from Veeam Backup & Replication
- **VeeamSecurityComplianceAnalyzer_CL**: Security & Compliance Analyzer results collected from Veeam backup infrastructure components
- **VeeamAuthorizationEvents_CL**: Authorization and authentication events
- **VeeamOneTriggeredAlarms_CL**: Triggered alarms from Veeam ONE servers
- **VeeamCovewareFindings_CL**: Security findings from Coveware solution
- **VeeamSessions_CL**: Veeam sessions","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Veeam APIs and pull data into Microsoft Sentinel custom tables. This may result in additional data ingestion costs. See the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Select the deployment option for Veeam Data Connector and associated Azure Functions**\n\n>**IMPORTANT:** Before you deploy Veeam Data Connector, prepare Workspace Name (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Veeam data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVeeam%2FData%2520Connectors%2Fazuredeploy_Veeam_API_FunctionApp.json)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Microsoft Sentinel Workspace Name**. \n4. Click **Review + Create**, **Create**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Veeam Infrastructure Access"", ""description"": ""Access to Veeam Backup & Replication REST API and Veeam ONE monitoring platform is required. This includes proper authentication credentials and network connectivity.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam/Data%20Connectors/Veeam_API_FunctionApp.json","true" +"VeeamSessions_CL","Veeam","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam","veeamsoftware","azure-sentinel-solution-veeamapp","2025-08-26","","3.0.2","Veeam Software","Partner","https://helpcenter.veeam.com/docs/security_plugins_microsoft_sentinel/guide/","","domains","VeeamCustomTablesDataConnector","Veeam","Veeam Data Connector (using Azure Functions)","Veeam Data Connector allows you to ingest Veeam telemetry data from multiple custom tables into Microsoft Sentinel.

The connector supports integration with Veeam Backup & Replication, Veeam ONE and Coveware platforms to provide comprehensive monitoring and security analytics. The data is collected through Azure Functions and stored in custom Log Analytics tables with dedicated Data Collection Rules (DCR) and Data Collection Endpoints (DCE).

**Custom Tables Included:**
- **VeeamMalwareEvents_CL**: Malware detection events from Veeam Backup & Replication
- **VeeamSecurityComplianceAnalyzer_CL**: Security & Compliance Analyzer results collected from Veeam backup infrastructure components
- **VeeamAuthorizationEvents_CL**: Authorization and authentication events
- **VeeamOneTriggeredAlarms_CL**: Triggered alarms from Veeam ONE servers
- **VeeamCovewareFindings_CL**: Security findings from Coveware solution
- **VeeamSessions_CL**: Veeam sessions","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to Veeam APIs and pull data into Microsoft Sentinel custom tables. This may result in additional data ingestion costs. See the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": ""**STEP 1 - Select the deployment option for Veeam Data Connector and associated Azure Functions**\n\n>**IMPORTANT:** Before you deploy Veeam Data Connector, prepare Workspace Name (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Veeam data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVeeam%2FData%2520Connectors%2Fazuredeploy_Veeam_API_FunctionApp.json)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Microsoft Sentinel Workspace Name**. \n4. Click **Review + Create**, **Create**.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Veeam Infrastructure Access"", ""description"": ""Access to Veeam Backup & Replication REST API and Veeam ONE monitoring platform is required. This includes proper authentication credentials and network connectivity.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veeam/Data%20Connectors/Veeam_API_FunctionApp.json","true" +"","Veritas NetBackup","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Veritas%20NetBackup","veritas","veritas-sentinel","2023-09-25","","","Veritas Technologies LLC","Partner","https://www.veritas.com/content/support/en_US/contact-us","","domains","","","","","","","","false" +"CommonSecurityLog","VirtualMetric DataStream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VirtualMetric%20DataStream","virtualmetric","azure-sentinel-solution-virtualmetric-datastream","2025-09-15","","","VirtualMetric","Partner","https://support.virtualmetric.com","VirtualMetric","domains","VirtualMetricDirectorProxy","VirtualMetric","VirtualMetric Director Proxy","VirtualMetric Director Proxy deploys an Azure Function App to securely bridge VirtualMetric DataStream with Azure services including Microsoft Sentinel, Azure Data Explorer, and Azure Storage.","[{""title"": ""Deploy VirtualMetric Director Proxy"", ""description"": ""Deploy the Azure Function App that serves as a secure proxy between VirtualMetric DataStream and Microsoft Sentinel."", ""instructions"": 
[{""type"": ""InstructionStepsGroup"", ""parameters"": {""enable"": true, ""instructionSteps"": [{""title"": ""Prerequisites and Deployment Order"", ""description"": ""**Recommended Deployment Order:**\n\nFor optimal configuration, consider deploying the target connectors first:\n\n1. **Deploy Microsoft Sentinel Connector**: Deploy the VirtualMetric DataStream for Microsoft Sentinel connector first to create the required Data Collection Endpoints and Rules.\n\n2. **Deploy Microsoft Sentinel data lake Connector** (optional): If using Microsoft Sentinel data lake tables, deploy the VirtualMetric DataStream for Microsoft Sentinel data lake connector.\n\n3. **Deploy Director Proxy** (this step): The Director Proxy can then be configured with your Microsoft Sentinel targets.\n\n**Note:** This order is recommended but not required. You can deploy the Director Proxy independently and configure it with your targets later.""}, {""title"": ""Deploy Azure Function App"", ""description"": ""Deploy the VirtualMetric Director Proxy Azure Function App using the Deploy to Azure button.\n\n1. **Deploy to Azure**:\n - Click the Deploy to Azure button below to deploy the Function App:\n - [![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVirtualMetric%2520DataStream%2FData%2520Connectors%2FVirtualMetric-DirectorProxy%2FDeployToAzure.json)\n\n2. **Configure Deployment Parameters**:\n - **Subscription**: Select your Azure subscription\n - **Resource Group**: Choose the same resource group as your Microsoft Sentinel workspace or create a new one\n - **Region**: Select the Azure region (should match your Microsoft Sentinel workspace region)\n - **Function App Name**: Provide a unique name for the Function App (e.g., \""vmetric-director-proxy\"")\n\n3. 
**Complete Deployment**:\n - Click **Review + create** to validate the parameters\n - Click **Create** to deploy the Function App\n - Wait for deployment to complete (typically 3-5 minutes)\n - Note the Function App URL: `https://.azurewebsites.net`""}, {""title"": ""Configure Function App Permissions"", ""description"": ""Assign the necessary permissions to the Function App's managed identity to access Microsoft Sentinel resources.\n\n1. **Enable System-Assigned Managed Identity**:\n - Navigate to your deployed Function App in Azure Portal\n - Go to **Identity** under Settings\n - Toggle **Status** to **On** for System assigned identity\n - Click **Save** and confirm\n\n2. **Navigate to Resource Group**:\n - Go to the resource group containing your Microsoft Sentinel workspace and Data Collection Endpoints\n\n3. **Assign Required Roles**:\n - Open **Access control (IAM)**\n - Click **+ Add** > **Add role assignment**\n - Assign the following roles to the Function App's system-assigned managed identity:\n - **Monitoring Metrics Publisher**: For sending data to Data Collection Endpoints\n - **Monitoring Reader**: For reading Data Collection Rules configuration\n\n4. **Select the Function App Identity**:\n - In **Members** tab, select **Managed identity**\n - Choose **Function App** and select your deployed Director Proxy Function App\n - Complete the role assignment\n\n5. **Get Function App Access Token** (Optional for Function Key authentication):\n - Navigate to your Function App\n - Go to **App keys** under Functions\n - Copy the default host key or create a new function key for authentication""}, {""title"": ""Configure VirtualMetric DataStream Integration"", ""description"": ""Set up VirtualMetric DataStream to send security telemetry to Microsoft Sentinel through the Director Proxy.\n\n1. 
**Access VirtualMetric DataStream Configuration**:\n - Log into your **VirtualMetric DataStream** management console\n - Navigate to **Targets** section\n - Click **Microsoft Sentinel Targets**\n - Click **Add new target** or edit an existing Microsoft Sentinel target\n\n2. **Configure General Settings**:\n - **Name**: Enter a name for your target (e.g., \""sentinel-with-proxy\"")\n - **Description**: Optionally provide a description for the target configuration\n\n3. **Configure Azure Authentication**:\n \n **For Service Principal Authentication:**\n - **Managed Identity for Azure**: Keep **Disabled**\n - **Tenant ID**: Enter your Azure Active Directory tenant ID\n - **Client ID**: Enter your service principal application ID\n - **Client Secret**: Enter your service principal client secret\n \n **For Azure Managed Identity:**\n - **Managed Identity for Azure**: Set to **Enabled**\n\n4. **Configure Director Proxy** (in Azure Properties tab):\n - **Endpoint Address**: Enter the Function App URL from Step 2 (format: `https://.azurewebsites.net`)\n - **Access Token**: Enter the Function App host key from Step 3 (optional if using Managed Identity)\n\n5. **Configure Stream Properties**:\n - **Endpoint**: Enter the DCE Logs Ingestion URI (format: `https://..ingest.monitor.azure.com`)\n - **Streams**: Select **Auto** for automatic stream detection, or configure specific streams if needed\n\n6. 
**Verify Data Ingestion in Microsoft Sentinel**:\n - Return to your **Log Analytics Workspace**\n - Run sample queries to confirm data is being received:\n ```kql\n CommonSecurityLog\n | where TimeGenerated > ago(1h)\n | take 10\n ```\n - Check the **Microsoft Sentinel Overview** dashboard for new data sources and event counts""}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": false}}], ""customs"": [{""name"": ""Azure Function App"", ""description"": ""An Azure Function App must be deployed to host the Director Proxy. Requires read, write, and delete permissions on Microsoft.Web/sites resources within your resource group to create and manage the Function App.""}, {""name"": ""VirtualMetric DataStream Configuration"", ""description"": ""You need VirtualMetric DataStream configured with authentication credentials to connect to the Director Proxy. 
The Director Proxy acts as a secure bridge between VirtualMetric DataStream and Azure services.""}, {""name"": ""Target Azure Services"", ""description"": ""Configure your target Azure services such as Microsoft Sentinel Data Collection Endpoints, Azure Data Explorer clusters, or Azure Storage accounts where the Director Proxy will forward data.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VirtualMetric%20DataStream/Data%20Connectors/VirtualMetric-DirectorProxy/Template_DirectorProxy.json","true" +"CommonSecurityLog","VirtualMetric DataStream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VirtualMetric%20DataStream","virtualmetric","azure-sentinel-solution-virtualmetric-datastream","2025-09-15","","","VirtualMetric","Partner","https://support.virtualmetric.com","VirtualMetric","domains","VirtualMetricMSSentinelConnector","VirtualMetric","VirtualMetric DataStream for Microsoft Sentinel","VirtualMetric DataStream connector deploys Data Collection Rules to ingest security telemetry into Microsoft Sentinel.","[{""title"": ""Configure VirtualMetric DataStream for Microsoft Sentinel"", ""description"": ""Configure the VirtualMetric DataStream for Microsoft Sentinel to send data."", ""instructions"": [{""type"": ""InstructionStepsGroup"", ""parameters"": {""enable"": true, ""instructionSteps"": [{""title"": ""Register Application in Microsoft Entra ID (Optional)"", ""description"": ""**Choose your authentication method:**\n\n**Option A: Use Azure Managed Identity (Recommended)**\n- Skip this step if you plan to use Azure Managed Identity for authentication.\n- Azure Managed Identity provides a more secure authentication method without managing credentials.\n\n**Option B: Register a Service Principal Application**\n\n1. 
**Open the [Microsoft Entra ID page](https://entra.microsoft.com/)**:\n - Click the provided link to open the **Microsoft Entra ID** registration page in a new tab.\n - Ensure you are logged in with an account that has **Application Administrator** or **Global Administrator** permissions.\n\n2. **Create a New Application**:\n - In the **Microsoft Entra ID portal**, select **App registrations** from the left-hand navigation.\n - Click on **+ New registration**.\n - Fill out the following fields:\n - **Name**: Enter a descriptive name for the app (e.g., \""VirtualMetric ASIM Connector\"").\n - **Supported account types**: Choose **Accounts in this organizational directory only** (Single tenant).\n - **Redirect URI**: Leave this blank.\n - Click **Register** to create the application.\n\n3. **Copy Application and Tenant IDs**:\n - Once the app is registered, note the **Application (client) ID** and **Directory (tenant) ID** from the **Overview** page. You'll need these for VirtualMetric DataStream configuration.\n\n4. **Create a Client Secret**:\n - In the **Certificates & secrets** section, click **+ New client secret**.\n - Add a description (e.g., 'VirtualMetric ASIM Secret') and set an appropriate expiration period.\n - Click **Add**.\n - **Copy the client secret value immediately**, as it will not be shown again. Store this securely for VirtualMetric DataStream configuration.""}, {""title"": ""Assign Required Permissions"", ""description"": ""Assign the required roles to your chosen authentication method (Service Principal or Managed Identity) in the resource group.\n\n**For Service Principal (if you completed Step 1):**\n\n1. **Navigate to Your Resource Group**:\n - Open the **Azure Portal** and navigate to the **Resource Group** that contains your **Log Analytics Workspace** and where **Data Collection Rules (DCRs)** will be deployed.\n\n2. 
**Assign the Monitoring Metrics Publisher Role**:\n - In the **Resource Group**, click on **Access control (IAM)** from the left-hand menu.\n - Click **+ Add** and select **Add role assignment**.\n - In the **Role** tab, search for and select **Monitoring Metrics Publisher**.\n - Click **Next** to go to the **Members** tab.\n - Under **Assign access to**, select **User, group, or service principal**.\n - Click **+ Select members** and search for your registered application by name or client ID.\n - Select your application and click **Select**.\n - Click **Review + assign** twice to complete the assignment.\n\n3. **Assign the Monitoring Reader Role**:\n - Repeat the same process to assign the **Monitoring Reader** role:\n - Click **+ Add** and select **Add role assignment**.\n - In the **Role** tab, search for and select **Monitoring Reader**.\n - Follow the same member selection process as above.\n - Click **Review + assign** twice to complete the assignment.\n\n**For Azure Managed Identity:**\n\n1. **Create or Identify Your Managed Identity**:\n - If using **System-assigned Managed Identity**: Enable it on your Azure resource (VM, App Service, etc.).\n - If using **User-assigned Managed Identity**: Create one in your resource group if it doesn't exist.\n\n2. **Assign the Monitoring Metrics Publisher Role**:\n - Follow the same steps as above, but in the **Members** tab:\n - Under **Assign access to**, select **Managed identity**.\n - Click **+ Select members** and choose the appropriate managed identity type and select your identity.\n - Click **Select**, then **Review + assign** twice to complete.\n\n3. 
**Assign the Monitoring Reader Role**:\n - Repeat the process to assign the **Monitoring Reader** role to the same managed identity.\n\n**Required Permission Summary:**\nThe assigned roles provide the following capabilities:\n- **Monitoring Metrics Publisher**: Write data to Data Collection Endpoints (DCE) and send telemetry through Data Collection Rules (DCR)\n- **Monitoring Reader**: Read stream configuration and access Log Analytics workspace for ASIM table ingestion""}, {""title"": ""Deploy Azure Infrastructure"", ""description"": ""Deploy the required Data Collection Endpoint (DCE) and Data Collection Rules (DCR) for Microsoft Sentinel tables using our ARM template.\n\n1. **Deploy to Azure**:\n - Click the Deploy to Azure button below to automatically deploy the required infrastructure:\n - [![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVirtualMetric%2520DataStream%2FData%2520Connectors%2FVirtualMetric-Sentinel%2FDeployToAzure.json)\n - This will take you directly to the Azure portal to start the deployment.\n\n2. **Configure Deployment Parameters**:\n - On the custom deployment page, configure the following settings:\n \n **Project details:**\n - **Subscription**: Select your Azure subscription from the dropdown\n - **Resource group**: Select an existing resource group or click **Create new** to create a new one\n \n **Instance details:**\n - **Region**: Select the Azure region where your Log Analytics workspace is located (e.g., West Europe)\n - **Workspace**: Enter your Log Analytics workspace name\n - **DCE Name**: Provide a name for the Data Collection Endpoint (e.g., \""vmetric-dce\"")\n - **DCR Name Prefix**: Provide a prefix for the Data Collection Rules (e.g., \""vmetric-dcr\"")\n\n3. 
**Complete the Deployment**:\n - Click **Review + create** to validate the template.\n - Review the parameters and click **Create** to deploy the resources.\n - Wait for the deployment to complete (typically takes 2-5 minutes).\n\n4. **Verify Deployed Resources**:\n - After deployment, verify the following resources were created:\n - **Data Collection Endpoint (DCE)**: Check **Azure Portal > Monitor > Data Collection Endpoints**\n - **Data Collection Rules (DCRs)**: Check **Azure Portal > Monitor > Data Collection Rules**\n - **Copy the DCE Logs Ingestion URI** from the DCE **Overview** page (format: `https://..ingest.monitor.azure.com`)\n - **Copy the DCE Resource ID** from the DCE **Overview** page (format: `/subscriptions//resourceGroups//providers/Microsoft.Insights/dataCollectionEndpoints/`)\n - For each DCR, note the **Immutable ID** from the **Overview** page - you'll need these for VirtualMetric DataStream configuration.""}, {""title"": ""Configure VirtualMetric DataStream Integration"", ""description"": ""Set up VirtualMetric DataStream to send security telemetry to Microsoft Sentinel tables.\n\n1. **Access VirtualMetric DataStream Configuration**:\n - Log into your **VirtualMetric DataStream** management console.\n - Navigate to **Fleet Management** > **Targets** section.\n - Click **Add new target** button.\n - Select **Microsoft Sentinel** target.\n\n2. **Configure General Settings**:\n - **Name**: Enter a name for your target (e.g., \""cus01-ms-sentinel\"")\n - **Description**: Optionally provide a description for the target configuration\n\n3. 
**Configure Azure Authentication** (choose based on Step 1):\n \n **For Service Principal Authentication:**\n - **Managed Identity for Azure**: Keep **Disabled**\n - **Tenant ID**: Enter the Directory (tenant) ID from Step 1\n - **Client ID**: Enter the Application (client) ID from Step 1\n - **Client Secret**: Enter the client secret value from Step 1\n \n **For Azure Managed Identity:**\n - **Managed Identity for Azure**: Set to **Enabled**\n\n4. **Configure Stream Properties**:\n - **Endpoint**: Choose your configuration method:\n - **For manual stream configuration**: Enter the DCE Logs Ingestion URI (format: `https://..ingest.monitor.azure.com`)\n - **For auto stream detection**: Enter the DCE Resource ID (format: `/subscriptions//resourceGroups//providers/Microsoft.Insights/dataCollectionEndpoints/`)\n - **Streams**: Select **Auto** for automatic stream detection, or configure specific streams if needed\n\n5. **Verify Data Ingestion in Microsoft Sentinel**:\n - Return to your **Log Analytics Workspace**\n - Run sample queries on the ASIM tables to confirm data is being received:\n ```kql\n ASimNetworkSessionLogs\n | where TimeGenerated > ago(1h)\n | take 10\n ```\n - Check the **Microsoft Sentinel Overview** dashboard for new data sources and event counts.""}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": false}}], ""customs"": [{""name"": ""App Registration or Azure Managed Identity"", ""description"": ""VirtualMetric DataStream 
requires an Entra ID identity to authenticate and send logs to Microsoft Sentinel. You can choose between creating an App Registration with Client ID and Client Secret, or using Azure Managed Identity for enhanced security without credential management.""}, {""name"": ""Resource Group Role Assignment"", ""description"": ""The chosen identity (App Registration or Managed Identity) must be assigned to the resource group containing the Data Collection Endpoint with the following roles: Monitoring Metrics Publisher (for log ingestion) and Monitoring Reader (for reading stream configuration).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VirtualMetric%20DataStream/Data%20Connectors/VirtualMetric-Sentinel/Template_Sentinel.json","true" +"CommonSecurityLog","VirtualMetric DataStream","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VirtualMetric%20DataStream","virtualmetric","azure-sentinel-solution-virtualmetric-datastream","2025-09-15","","","VirtualMetric","Partner","https://support.virtualmetric.com","VirtualMetric","domains","VirtualMetricMSSentinelDataLakeConnector","VirtualMetric","VirtualMetric DataStream for Microsoft Sentinel data lake","VirtualMetric DataStream connector deploys Data Collection Rules to ingest security telemetry into Microsoft Sentinel data lake.","[{""title"": ""Configure VirtualMetric DataStream for Microsoft Sentinel data lake"", ""description"": ""Configure the VirtualMetric DataStream for Microsoft Sentinel data lake to send data."", ""instructions"": [{""type"": ""InstructionStepsGroup"", ""parameters"": {""enable"": true, ""instructionSteps"": [{""title"": ""Register Application in Microsoft Entra ID (Optional)"", ""description"": ""**Choose your authentication method:**\n\n**Option A: Use Azure Managed Identity (Recommended)**\n- Skip this step if you plan to use Azure Managed Identity for authentication.\n- Azure Managed Identity provides a more secure authentication method without managing 
credentials.\n\n**Option B: Register a Service Principal Application**\n\n1. **Open the [Microsoft Entra ID page](https://entra.microsoft.com/)**:\n - Click the provided link to open the **Microsoft Entra ID** registration page in a new tab.\n - Ensure you are logged in with an account that has **Application Administrator** or **Global Administrator** permissions.\n\n2. **Create a New Application**:\n - In the **Microsoft Entra ID portal**, select **App registrations** from the left-hand navigation.\n - Click on **+ New registration**.\n - Fill out the following fields:\n - **Name**: Enter a descriptive name for the app (e.g., \""VirtualMetric ASIM Connector\"").\n - **Supported account types**: Choose **Accounts in this organizational directory only** (Single tenant).\n - **Redirect URI**: Leave this blank.\n - Click **Register** to create the application.\n\n3. **Copy Application and Tenant IDs**:\n - Once the app is registered, note the **Application (client) ID** and **Directory (tenant) ID** from the **Overview** page. You'll need these for VirtualMetric DataStream configuration.\n\n4. **Create a Client Secret**:\n - In the **Certificates & secrets** section, click **+ New client secret**.\n - Add a description (e.g., 'VirtualMetric ASIM Secret') and set an appropriate expiration period.\n - Click **Add**.\n - **Copy the client secret value immediately**, as it will not be shown again. Store this securely for VirtualMetric DataStream configuration.""}, {""title"": ""Assign Required Permissions"", ""description"": ""Assign the required roles to your chosen authentication method (Service Principal or Managed Identity) in the resource group.\n\n**For Service Principal (if you completed Step 1):**\n\n1. **Navigate to Your Resource Group**:\n - Open the **Azure Portal** and navigate to the **Resource Group** that contains your **Log Analytics Workspace** and where **Data Collection Rules (DCRs)** will be deployed.\n\n2. 
**Assign the Monitoring Metrics Publisher Role**:\n - In the **Resource Group**, click on **Access control (IAM)** from the left-hand menu.\n - Click **+ Add** and select **Add role assignment**.\n - In the **Role** tab, search for and select **Monitoring Metrics Publisher**.\n - Click **Next** to go to the **Members** tab.\n - Under **Assign access to**, select **User, group, or service principal**.\n - Click **+ Select members** and search for your registered application by name or client ID.\n - Select your application and click **Select**.\n - Click **Review + assign** twice to complete the assignment.\n\n3. **Assign the Monitoring Reader Role**:\n - Repeat the same process to assign the **Monitoring Reader** role:\n - Click **+ Add** and select **Add role assignment**.\n - In the **Role** tab, search for and select **Monitoring Reader**.\n - Follow the same member selection process as above.\n - Click **Review + assign** twice to complete the assignment.\n\n**For Azure Managed Identity:**\n\n1. **Create or Identify Your Managed Identity**:\n - If using **System-assigned Managed Identity**: Enable it on your Azure resource (VM, App Service, etc.).\n - If using **User-assigned Managed Identity**: Create one in your resource group if it doesn't exist.\n\n2. **Assign the Monitoring Metrics Publisher Role**:\n - Follow the same steps as above, but in the **Members** tab:\n - Under **Assign access to**, select **Managed identity**.\n - Click **+ Select members** and choose the appropriate managed identity type and select your identity.\n - Click **Select**, then **Review + assign** twice to complete.\n\n3. 
**Assign the Monitoring Reader Role**:\n - Repeat the process to assign the **Monitoring Reader** role to the same managed identity.\n\n**Required Permission Summary:**\nThe assigned roles provide the following capabilities:\n- **Monitoring Metrics Publisher**: Write data to Data Collection Endpoints (DCE) and send telemetry through Data Collection Rules (DCR)\n- **Monitoring Reader**: Read stream configuration and access Log Analytics workspace for ASIM table ingestion""}, {""title"": ""Deploy Azure Infrastructure"", ""description"": ""Deploy the required Data Collection Endpoint (DCE) and Data Collection Rules (DCR) for Microsoft Sentinel data lake tables using our ARM template.\n\n1. **Deploy to Azure**:\n - Click the Deploy to Azure button below to automatically deploy the required infrastructure:\n - [![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2FAzure-Sentinel%2Fmaster%2FSolutions%2FVirtualMetric%2520DataStream%2FData%2520Connectors%2FVirtualMetric-SentinelDataLake%2FDeployToAzure.json)\n - This will take you directly to the Azure portal to start the deployment.\n\n2. **Configure Deployment Parameters**:\n - On the custom deployment page, configure the following settings:\n \n **Project details:**\n - **Subscription**: Select your Azure subscription from the dropdown\n - **Resource group**: Select an existing resource group or click **Create new** to create a new one\n \n **Instance details:**\n - **Region**: Select the Azure region where your Log Analytics workspace is located (e.g., West Europe)\n - **Workspace**: Enter your Log Analytics workspace name\n - **DCE Name**: Provide a name for the Data Collection Endpoint (e.g., \""vmetric-dce\"")\n - **DCR Name Prefix**: Provide a prefix for the Data Collection Rules (e.g., \""vmetric-dcr\"")\n\n3. 
**Complete the Deployment**:\n - Click **Review + create** to validate the template.\n - Review the parameters and click **Create** to deploy the resources.\n - Wait for the deployment to complete (typically takes 2-5 minutes).\n\n4. **Verify Deployed Resources**:\n - After deployment, verify the following resources were created:\n - **Data Collection Endpoint (DCE)**: Check **Azure Portal > Monitor > Data Collection Endpoints**\n - **Data Collection Rules (DCRs)**: Check **Azure Portal > Monitor > Data Collection Rules**\n - **Copy the DCE Logs Ingestion URI** from the DCE **Overview** page (format: `https://..ingest.monitor.azure.com`)\n - **Copy the DCE Resource ID** from the DCE **Overview** page (format: `/subscriptions//resourceGroups//providers/Microsoft.Insights/dataCollectionEndpoints/`)\n - For each DCR, note the **Immutable ID** from the **Overview** page - you'll need these for VirtualMetric DataStream configuration.""}, {""title"": ""Configure VirtualMetric DataStream Integration"", ""description"": ""Set up VirtualMetric DataStream to send security telemetry to Microsoft Sentinel data lake tables.\n\n1. **Access VirtualMetric DataStream Configuration**:\n - Log into your **VirtualMetric DataStream** management console.\n - Navigate to **Fleet Management** > **Targets** section.\n - Click **Add new target** button.\n - Select **Microsoft Sentinel** target.\n\n2. **Configure General Settings**:\n - **Name**: Enter a name for your target (e.g., \""cus01-ms-sentinel\"")\n - **Description**: Optionally provide a description for the target configuration\n\n3. 
**Configure Azure Authentication** (choose based on Step 1):\n \n **For Service Principal Authentication:**\n - **Managed Identity for Azure**: Keep **Disabled**\n - **Tenant ID**: Enter the Directory (tenant) ID from Step 1\n - **Client ID**: Enter the Application (client) ID from Step 1\n - **Client Secret**: Enter the client secret value from Step 1\n \n **For Azure Managed Identity:**\n - **Managed Identity for Azure**: Set to **Enabled**\n\n4. **Configure Stream Properties**:\n - **Endpoint**: Choose your configuration method:\n - **For manual stream configuration**: Enter the DCE Logs Ingestion URI (format: `https://..ingest.monitor.azure.com`)\n - **For auto stream detection**: Enter the DCE Resource ID (format: `/subscriptions//resourceGroups//providers/Microsoft.Insights/dataCollectionEndpoints/`)\n - **Streams**: Select **Auto** for automatic stream detection, or configure specific streams if needed\n\n5. **Verify Data Ingestion in Microsoft Sentinel data lake**:\n - Return to your **Log Analytics Workspace**\n - Run sample queries on the ASIM tables to confirm data is being received:\n ```kql\n ASimNetworkSessionLogs\n | where TimeGenerated > ago(1h)\n | take 10\n ```\n - Check the **Microsoft Sentinel Overview** dashboard for new data sources and event counts.""}]}}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": false}}], ""customs"": [{""name"": ""App Registration or Azure Managed Identity"", ""description"": ""VirtualMetric 
DataStream requires an Entra ID identity to authenticate and send logs to Microsoft Sentinel data lake. You can choose between creating an App Registration with Client ID and Client Secret, or using Azure Managed Identity for enhanced security without credential management.""}, {""name"": ""Resource Group Role Assignment"", ""description"": ""The chosen identity (App Registration or Managed Identity) must be assigned to the resource group containing the Data Collection Endpoint with the following roles: Monitoring Metrics Publisher (for log ingestion) and Monitoring Reader (for reading stream configuration).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VirtualMetric%20DataStream/Data%20Connectors/VirtualMetric-SentinelDataLake/Template_SentinelDataLake.json","true" +"","VirusTotal","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/VirusTotal","azuresentinel","azure-sentinel-solution-virustotal","2022-07-31","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"CommonSecurityLog","Votiro","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Votiro","votirocybersecltd1670174946024","votiro_data_connector","","","","Votiro","Partner","https://support.votiro.com/","","domains","Votiro","Votiro","[Deprecated] Votiro Sanitization Engine Logs","The Votiro data connector allows you to easily connect your Votiro Event logs with Microsoft Sentinel, to view dashboards, create custom alerts, and improve investigation. Using Votiro on Microsoft Sentinel will provide you more insights into the sanitization results of files.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Set Votiro Endpoints to send Syslog messages in CEF format to the Forwarder machine. Make sure you to send the logs to port 514 TCP on the Forwarder machine's IP address.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Votiro/Data%20Connectors/VotiroEvents.json","true" +"Syslog","Watchguard Firebox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Watchguard%20Firebox","watchguard-technologies","watchguard_firebox_mss","2022-05-06","","","WatchGuard","Partner","https://www.watchguard.com/wgrd-support/contact-support","","domains","WatchguardFirebox","WatchGuard Technologies","[Deprecated] WatchGuard Firebox","WatchGuard Firebox (https://www.watchguard.com/wgrd-products/firewall-appliances and https://www.watchguard.com/wgrd-products/cloud-and-virtual-firewalls) is security products/firewall-appliances. Watchguard Firebox will send syslog to Watchguard Firebox collector agent.The agent then sends the message to the workspace.","[{""title"": """", ""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias WatchGuardFirebox and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Watchguard%20Firebox/Parsers/WatchGuardFirebox.txt) on the second line of the query, enter the hostname(s) of your WatchGuard Firebox device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update."", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux"", ""description"": ""Typically, you should install the agent on a different computer from the one on which the logs are generated.\n\n> Syslog logs are collected only from **Linux** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Configure the facilities you want to collect and their severities.\n\n1. Under workspace advanced settings **Configuration**, select **Data** and then **Syslog**.\n2. Select **Apply below configuration to my machines** and select the facilities and severities.\n3. 
Click **Save**."", ""instructions"": [{""parameters"": {""linkType"": ""OpenSyslogSettings""}, ""type"": ""InstallAgent""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""write permission is required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""delete"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Watchguard%20Firebox/Data%20Connectors/Connector_syslog_WatchGuardFirebox.json","true" +"","Watchlists Utilities","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Watchlists%20Utilities","azuresentinel","azure-sentinel-solution-watchlistsutilities","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"","Web Session Essentials","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Web%20Session%20Essentials","azuresentinel","azure-sentinel-solution-websession-domain","2023-06-29","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","Web Shells Threat Protection","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Web%20Shells%20Threat%20Protection","azuresentinel","azure-sentinel-solution-webshellsthreatprotection","2022-05-22","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"","Windows Firewall","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Firewall","azuresentinel","azure-sentinel-solution-windowsfirewall","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","Windows Forwarded 
Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Forwarded%20Events","azuresentinel","azure-sentinel-solution-windowsforwardedevents","2022-05-02","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","Windows Security Events","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Security%20Events","azuresentinel","azure-sentinel-solution-securityevents","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"","Windows Server DNS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Windows%20Server%20DNS","azuresentinel","azure-sentinel-solution-dns","2022-05-11","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"CommonSecurityLog","WireX Network Forensics Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/WireX%20Network%20Forensics%20Platform","wirexsystems1584682625009","wirex_network_forensics_platform_mss","2022-05-06","","","WireX Systems","Partner","https://wirexsystems.com/contact-us/","","domains","WireX_Systems_NFP","WireX_Systems","[Deprecated] WireX Network Forensics Platform via Legacy Agent","The WireX Systems data connector allows security professional to integrate with Microsoft Sentinel to allow you to further enrich your forensics investigations; to not only encompass the contextual content offered by WireX but to analyze data from other sources, and to create custom dashboards to give the most complete picture during a forensic investigation and to create custom workflows.","[{""title"": ""1. 
Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Contact WireX support (https://wirexsystems.com/contact-us/) in order to configure your NFP solution to send Syslog messages in CEF format to the proxy machine. Make sure that they central manager can send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""3. 
Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/WireX%20Network%20Forensics%20Platform/Data%20Connectors/WireXsystemsNFP%281b%29.json","true" +"CommonSecurityLog","WireX Network Forensics Platform","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/WireX%20Network%20Forensics%20Platform","wirexsystems1584682625009","wirex_network_forensics_platform_mss","2022-05-06","","","WireX Systems","Partner","https://wirexsystems.com/contact-us/","","domains","WireX_Systems_NFPAma","WireX_Systems","[Deprecated] WireX Network Forensics Platform via AMA","The WireX Systems data connector allows security professional to integrate with Microsoft Sentinel to allow you to further enrich your forensics investigations; to not only encompass the contextual content offered by WireX but to analyze data from other sources, and to create custom dashboards to give the most complete picture during a forensic investigation and to create custom workflows.","[{""title"": """", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check If there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplicacy_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine""}, {""title"": ""Step B. Forward Common Event Format (CEF) logs to Syslog agent"", ""description"": ""Contact WireX support (https://wirexsystems.com/contact-us/) in order to configure your NFP solution to send Syslog messages in CEF format to the proxy machine. Make sure that they central manager can send the logs to port 514 TCP on the machine's IP address.""}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. 
Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/WireX%20Network%20Forensics%20Platform/Data%20Connectors/template_WireXsystemsNFPAMA.json","true" +"CommonSecurityLog","WithSecureElementsViaConnector","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/WithSecureElementsViaConnector","withsecurecorporation","sentinel-solution-withsecure-via-connector","2022-11-03","2022-11-03","","WithSecure","Partner","https://www.withsecure.com/en/support","","domains","WithSecureElementsViaConnector","WithSecure","[Deprecated] WithSecure Elements via Connector","WithSecure Elements is a unified cloud-based cyber security platform.
By connecting WithSecure Elements via Connector to Microsoft Sentinel, security events can be received in Common Event Format (CEF) over syslog.
It requires deploying ""Elements Connector"" either on-premises or in the cloud.
The Common Event Format (CEF) provides natively search & correlation, alerting and threat intelligence enrichment for each data log.","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your WithSecurity solution and Sentinel. The machine can be on-prem environment, Microsoft Azure or other cloud based.\n> Linux needs to have `syslog-ng` and `python`/`python3` installed.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. 
You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""For python3 use command below:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python3 cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward data from WithSecure Elements Connector to Syslog agent"", ""description"": ""This describes how to install and configure Elements Connector step by step."", ""innerSteps"": [{""title"": ""2.1 Order Connector subscription"", ""description"": ""If Connector subscription has not been ordered yet go to EPP in Elements Portal. Then navigate to Downloads and in Elements Connector section click 'Create subscription key' button. You can check Your subscription key in Subscriptions.""}, {""title"": ""2.2 Download Connector"", ""description"": ""Go to Downloads and in WithSecure Elements Connector section select correct installer.""}, {""title"": ""2.3 Create management API key"", ""description"": ""When in EPP open account settings in top right corner. Then select Get management API key. 
If key has been created earlier it can be read there as well.""}, {""title"": ""2.4 Install Connector"", ""description"": ""To install Elements Connector follow [Elements Connector Docs](https://www.withsecure.com/userguides/product.html#business/connector/latest/en/).""}, {""title"": ""2.5 Configure event forwarding"", ""description"": ""If api access has not been configured during installation follow [Configuring API access for Elements Connector](https://www.withsecure.com/userguides/product.html#business/connector/latest/en/task_F657F4D0F2144CD5913EE510E155E234-latest-en).\nThen go to EPP, then Profiles, then use For Connector from where you can see the connector profiles. Create a new profile (or edit an existing not read-only profile). In Event forwarding enable it. SIEM system address: **127.0.0.1:514**. Set format to **Common Event Format**. Protocol is **TCP**. Save profile and assign it to Elements Connector in Devices tab.""}]}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""For python3 use command below:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python3 cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/WithSecureElementsViaConnector/Data%20Connectors/WithSecureElementsViaConnector.json","true" +"WsSecurityEvents_CL","WithSecureElementsViaFunction","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/WithSecureElementsViaFunction","withsecurecorporation","sentinel-solution-withsecure-via-function","2024-02-22","2025-04-25","","WithSecure","Partner","https://www.withsecure.com/en/support","","domains","WithSecureElementsViaFunction","WithSecure","WithSecure Elements API (Azure Function)","WithSecure Elements is the unified cloud-based cyber security platform designed to reduce risk, complexity, and inefficiency.

Elevate your security from your endpoints to your cloud applications. Arm yourself against every type of cyber threat, from targeted attacks to zero-day ransomware.

WithSecure Elements combines powerful predictive, preventive, and responsive security capabilities - all managed and monitored through a single security center. Our modular structure and flexible pricing models give you the freedom to evolve. With our expertise and insight, you'll always be empowered - and you'll never be alone.

With Microsoft Sentinel integration, you can correlate [security events](https://connect.withsecure.com/api-reference/security-events#overview) data from the WithSecure Elements solution with data from other sources, enabling a rich overview of your entire environment and faster reaction to threats.

With this solution Azure Function is deployed to your tenant, polling periodically for the WithSecure Elements security events.

For more information visit our website at: [https://www.withsecure.com](https://www.withsecure.com).","[{""title"": ""1. Create WithSecure Elements API credentials"", ""description"": ""Follow the [user guide](https://connect.withsecure.com/getting-started/elements#getting-client-credentials) to create Elements API credentials. Save credentials in a safe place.""}, {""title"": ""2. Create Microsoft Entra application"", ""description"": ""Create new Microsoft Entra application and credentials. Follow [the instructions](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-microsoft-entra-application) and store values of **Directory (tenant) ID**, **Object ID**, **Application (client) ID** and **Client Secret** (from client credentials field). Remember to store Client Secret in a safe place.""}, {""title"": ""3. Deploy Function App"", ""description"": "">**NOTE:** This connector uses Azure Functions to pull logs from WithSecure Elements. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store Microsoft Entra client credentials and WithSecure Elements API client credentials in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": "">**IMPORTANT:** Before deploying the WithSecure Elements connector, have the Workspace Name (can be copied from the following), data from Microsoft Entra (Directory (tenant) ID, Object ID, Application (client) ID and Client Secret), as well as the WithSecure Elements client credentials, readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""workspaceName""], ""label"": ""Workspace Name""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Deploy all the resources related to the connector"", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-WithSecureElementsViaFunction-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. Enter the **Workspace ID**, **Entra Client ID**, **Entra Client Secret**, **Entra Tenant ID**, **Elements API Client ID**, **Elements API Client Secret**.\n>Note: If using Azure Key Vault secrets for any of the values above, use the`@Microsoft.KeyVault(SecretUri={Security Identifier})`schema in place of the string values. Refer to [Key Vault references documentation](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) for further details. \n4. You can also fill in optional fields: **Elements API url**, **Engine**, **Engine Group**. Use default value of **Elements API url** unless you have some special case. **Engine** and **Engine Group** map to [security events request parameters](https://connect.withsecure.com/api-reference/elements#post-/security-events/v1/security-events), fill in those parameters if you are interested only in events from specific engine or engine group, in case you want to receive all security events leave the fields with default values.\n5. 
Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n6. Click **Purchase** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""WithSecure Elements API client credentials"", ""description"": ""Client credentials are required. [See the documentation to learn more.](https://connect.withsecure.com/getting-started/elements#getting-client-credentials)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/WithSecureElementsViaFunction/Data%20Connectors/WithSecureElementsViaFunction.json","true" +"WizAuditLogsV2_CL","Wiz","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz","wizinc1627338511749","wizinc1627338511749_wiz_mss-sentinel","2023-06-20","","","Wiz","Partner","https://support.wiz.io/","","domains","Wiz","Wiz","Wiz","The Wiz connector allows you to easily send Wiz Issues, Vulnerability Findings, and Audit logs to Microsoft Sentinel.","[{""description"": "">**NOTE:** This connector: Uses Azure Functions to connect to Wiz API to pull Wiz Issues, Vulnerability Findings, and Audit Logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\nCreates an Azure Key Vault with all the required parameters stored as secrets.""}, {""description"": ""\nFollow the instructions on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz) to get the erquired credentials."", ""title"": ""STEP 1 - Get your Wiz credentials""}, {""description"": ""\n>**IMPORTANT:** Before deploying the Wiz Connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Wiz credentials from the previous step."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}], ""title"": ""STEP 2 - Deploy the connector and the associated Azure Function""}, {""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-wiz-azuredeploy) \n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the following parameters: \n> - Choose **KeyVaultName** and **FunctionName** for the new resources \n >- Enter the following Wiz credentials from step 1: **WizAuthUrl**, **WizEndpointUrl**, **WizClientId**, and **WizClientSecret** \n>- Enter the Workspace credentials **AzureLogsAnalyticsWorkspaceId** and **AzureLogAnalyticsWorkspaceSharedKey**\n>- Choose the Wiz data types you want to send to Microsoft Sentinel, choose at least one from **Wiz Issues**, **Vulnerability Findings**, and **Audit Logs**.\n \n>- (optional) follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#optional-create-a-filter-for-wiz-queries) to add **IssuesQueryFilter**, **VulnerbailitiesQueryFilter**, and **AuditLogsQueryFilter**.\n \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n"", ""title"": ""Option 1: Deploy using the Azure Resource Manager (ARM) Template""}, {""description"": "">Follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#manual-deployment) to deploy the connector manually."", ""title"": ""Option 2: Manual Deployment of the Azure Function""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Wiz Service Account credentials"", ""description"": ""Ensure you have your Wiz service account client ID and client secret, API endpoint URL, and auth URL. Instructions can be found on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz/Data%20Connectors/template_WIZ.json","true" +"WizAuditLogs_CL","Wiz","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz","wizinc1627338511749","wizinc1627338511749_wiz_mss-sentinel","2023-06-20","","","Wiz","Partner","https://support.wiz.io/","","domains","Wiz","Wiz","Wiz","The Wiz connector allows you to easily send Wiz Issues, Vulnerability Findings, and Audit logs to Microsoft Sentinel.","[{""description"": "">**NOTE:** This connector: Uses Azure Functions to connect to Wiz API to pull Wiz Issues, Vulnerability Findings, and Audit Logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\nCreates an Azure Key Vault with all the required parameters stored as secrets.""}, {""description"": ""\nFollow the instructions on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz) to get the erquired credentials."", ""title"": ""STEP 1 - Get your Wiz credentials""}, {""description"": ""\n>**IMPORTANT:** Before deploying the Wiz Connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Wiz credentials from the previous step."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}], ""title"": ""STEP 2 - Deploy the connector and the associated Azure Function""}, {""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-wiz-azuredeploy) \n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the following parameters: \n> - Choose **KeyVaultName** and **FunctionName** for the new resources \n >- Enter the following Wiz credentials from step 1: **WizAuthUrl**, **WizEndpointUrl**, **WizClientId**, and **WizClientSecret** \n>- Enter the Workspace credentials **AzureLogsAnalyticsWorkspaceId** and **AzureLogAnalyticsWorkspaceSharedKey**\n>- Choose the Wiz data types you want to send to Microsoft Sentinel, choose at least one from **Wiz Issues**, **Vulnerability Findings**, and **Audit Logs**.\n \n>- (optional) follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#optional-create-a-filter-for-wiz-queries) to add **IssuesQueryFilter**, **VulnerbailitiesQueryFilter**, and **AuditLogsQueryFilter**.\n \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n"", ""title"": ""Option 1: Deploy using the Azure Resource Manager (ARM) Template""}, {""description"": "">Follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#manual-deployment) to deploy the connector manually."", ""title"": ""Option 2: Manual Deployment of the Azure Function""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Wiz Service Account credentials"", ""description"": ""Ensure you have your Wiz service account client ID and client secret, API endpoint URL, and auth URL. Instructions can be found on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz/Data%20Connectors/template_WIZ.json","true" +"WizIssuesV2_CL","Wiz","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz","wizinc1627338511749","wizinc1627338511749_wiz_mss-sentinel","2023-06-20","","","Wiz","Partner","https://support.wiz.io/","","domains","Wiz","Wiz","Wiz","The Wiz connector allows you to easily send Wiz Issues, Vulnerability Findings, and Audit logs to Microsoft Sentinel.","[{""description"": "">**NOTE:** This connector: Uses Azure Functions to connect to Wiz API to pull Wiz Issues, Vulnerability Findings, and Audit Logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\nCreates an Azure Key Vault with all the required parameters stored as secrets.""}, {""description"": ""\nFollow the instructions on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz) to get the erquired credentials."", ""title"": ""STEP 1 - Get your Wiz credentials""}, {""description"": ""\n>**IMPORTANT:** Before deploying the Wiz Connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Wiz credentials from the previous step."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}], ""title"": ""STEP 2 - Deploy the connector and the associated Azure Function""}, {""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-wiz-azuredeploy) \n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the following parameters: \n> - Choose **KeyVaultName** and **FunctionName** for the new resources \n >- Enter the following Wiz credentials from step 1: **WizAuthUrl**, **WizEndpointUrl**, **WizClientId**, and **WizClientSecret** \n>- Enter the Workspace credentials **AzureLogsAnalyticsWorkspaceId** and **AzureLogAnalyticsWorkspaceSharedKey**\n>- Choose the Wiz data types you want to send to Microsoft Sentinel, choose at least one from **Wiz Issues**, **Vulnerability Findings**, and **Audit Logs**.\n \n>- (optional) follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#optional-create-a-filter-for-wiz-queries) to add **IssuesQueryFilter**, **VulnerbailitiesQueryFilter**, and **AuditLogsQueryFilter**.\n \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n"", ""title"": ""Option 1: Deploy using the Azure Resource Manager (ARM) Template""}, {""description"": "">Follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#manual-deployment) to deploy the connector manually."", ""title"": ""Option 2: Manual Deployment of the Azure Function""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Wiz Service Account credentials"", ""description"": ""Ensure you have your Wiz service account client ID and client secret, API endpoint URL, and auth URL. Instructions can be found on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz/Data%20Connectors/template_WIZ.json","true" +"WizIssues_CL","Wiz","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz","wizinc1627338511749","wizinc1627338511749_wiz_mss-sentinel","2023-06-20","","","Wiz","Partner","https://support.wiz.io/","","domains","Wiz","Wiz","Wiz","The Wiz connector allows you to easily send Wiz Issues, Vulnerability Findings, and Audit logs to Microsoft Sentinel.","[{""description"": "">**NOTE:** This connector: Uses Azure Functions to connect to Wiz API to pull Wiz Issues, Vulnerability Findings, and Audit Logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\nCreates an Azure Key Vault with all the required parameters stored as secrets.""}, {""description"": ""\nFollow the instructions on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz) to get the required credentials."", ""title"": ""STEP 1 - Get your Wiz credentials""}, {""description"": ""\n>**IMPORTANT:** Before deploying the Wiz Connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Wiz credentials from the previous step."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}], ""title"": ""STEP 2 - Deploy the connector and the associated Azure Function""}, {""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-wiz-azuredeploy) \n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the following parameters: \n> - Choose **KeyVaultName** and **FunctionName** for the new resources \n >- Enter the following Wiz credentials from step 1: **WizAuthUrl**, **WizEndpointUrl**, **WizClientId**, and **WizClientSecret** \n>- Enter the Workspace credentials **AzureLogsAnalyticsWorkspaceId** and **AzureLogAnalyticsWorkspaceSharedKey**\n>- Choose the Wiz data types you want to send to Microsoft Sentinel, choose at least one from **Wiz Issues**, **Vulnerability Findings**, and **Audit Logs**.\n \n>- (optional) follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#optional-create-a-filter-for-wiz-queries) to add **IssuesQueryFilter**, **VulnerbailitiesQueryFilter**, and **AuditLogsQueryFilter**.\n \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n"", ""title"": ""Option 1: Deploy using the Azure Resource Manager (ARM) Template""}, {""description"": "">Follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#manual-deployment) to deploy the connector manually."", ""title"": ""Option 2: Manual Deployment of the Azure Function""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Wiz Service Account credentials"", ""description"": ""Ensure you have your Wiz service account client ID and client secret, API endpoint URL, and auth URL. Instructions can be found on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz/Data%20Connectors/template_WIZ.json","true" +"WizVulnerabilitiesV2_CL","Wiz","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz","wizinc1627338511749","wizinc1627338511749_wiz_mss-sentinel","2023-06-20","","","Wiz","Partner","https://support.wiz.io/","","domains","Wiz","Wiz","Wiz","The Wiz connector allows you to easily send Wiz Issues, Vulnerability Findings, and Audit logs to Microsoft Sentinel.","[{""description"": "">**NOTE:** This connector: Uses Azure Functions to connect to Wiz API to pull Wiz Issues, Vulnerability Findings, and Audit Logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\nCreates an Azure Key Vault with all the required parameters stored as secrets.""}, {""description"": ""\nFollow the instructions on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz) to get the required credentials."", ""title"": ""STEP 1 - Get your Wiz credentials""}, {""description"": ""\n>**IMPORTANT:** Before deploying the Wiz Connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Wiz credentials from the previous step."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}], ""title"": ""STEP 2 - Deploy the connector and the associated Azure Function""}, {""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-wiz-azuredeploy) \n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the following parameters: \n> - Choose **KeyVaultName** and **FunctionName** for the new resources \n >- Enter the following Wiz credentials from step 1: **WizAuthUrl**, **WizEndpointUrl**, **WizClientId**, and **WizClientSecret** \n>- Enter the Workspace credentials **AzureLogsAnalyticsWorkspaceId** and **AzureLogAnalyticsWorkspaceSharedKey**\n>- Choose the Wiz data types you want to send to Microsoft Sentinel, choose at least one from **Wiz Issues**, **Vulnerability Findings**, and **Audit Logs**.\n \n>- (optional) follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#optional-create-a-filter-for-wiz-queries) to add **IssuesQueryFilter**, **VulnerbailitiesQueryFilter**, and **AuditLogsQueryFilter**.\n \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n"", ""title"": ""Option 1: Deploy using the Azure Resource Manager (ARM) Template""}, {""description"": "">Follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#manual-deployment) to deploy the connector manually."", ""title"": ""Option 2: Manual Deployment of the Azure Function""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Wiz Service Account credentials"", ""description"": ""Ensure you have your Wiz service account client ID and client secret, API endpoint URL, and auth URL. Instructions can be found on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz/Data%20Connectors/template_WIZ.json","true" +"WizVulnerabilities_CL","Wiz","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz","wizinc1627338511749","wizinc1627338511749_wiz_mss-sentinel","2023-06-20","","","Wiz","Partner","https://support.wiz.io/","","domains","Wiz","Wiz","Wiz","The Wiz connector allows you to easily send Wiz Issues, Vulnerability Findings, and Audit logs to Microsoft Sentinel.","[{""description"": "">**NOTE:** This connector: Uses Azure Functions to connect to Wiz API to pull Wiz Issues, Vulnerability Findings, and Audit Logs into Microsoft Sentinel. This might result in additional data ingestion costs. 
Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.\nCreates an Azure Key Vault with all the required parameters stored as secrets.""}, {""description"": ""\nFollow the instructions on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz) to get the required credentials."", ""title"": ""STEP 1 - Get your Wiz credentials""}, {""description"": ""\n>**IMPORTANT:** Before deploying the Wiz Connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), as well as the Wiz credentials from the previous step."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}], ""title"": ""STEP 2 - Deploy the connector and the associated Azure Function""}, {""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-wiz-azuredeploy) \n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n3. 
Enter the following parameters: \n> - Choose **KeyVaultName** and **FunctionName** for the new resources \n >- Enter the following Wiz credentials from step 1: **WizAuthUrl**, **WizEndpointUrl**, **WizClientId**, and **WizClientSecret** \n>- Enter the Workspace credentials **AzureLogsAnalyticsWorkspaceId** and **AzureLogAnalyticsWorkspaceSharedKey**\n>- Choose the Wiz data types you want to send to Microsoft Sentinel, choose at least one from **Wiz Issues**, **Vulnerability Findings**, and **Audit Logs**.\n \n>- (optional) follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#optional-create-a-filter-for-wiz-queries) to add **IssuesQueryFilter**, **VulnerbailitiesQueryFilter**, and **AuditLogsQueryFilter**.\n \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n"", ""title"": ""Option 1: Deploy using the Azure Resource Manager (ARM) Template""}, {""description"": "">Follow [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#manual-deployment) to deploy the connector manually."", ""title"": ""Option 2: Manual Deployment of the Azure Function""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Wiz Service Account credentials"", ""description"": ""Ensure you have your Wiz service account client ID and client secret, API endpoint URL, and auth URL. Instructions can be found on [Wiz documentation](https://docs.wiz.io/wiz-docs/docs/azure-sentinel-native-integration#collect-authentication-info-from-wiz).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Wiz/Data%20Connectors/template_WIZ.json","true" +"ASimAuditEventLogs","Workday","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Workday","azuresentinel","azure-sentinel-solution-workday","2024-02-15","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","WorkdayCCPDefinition","Microsoft","Workday User Activity","The [Workday](https://www.workday.com/) User Activity data connector provides the capability to ingest User Activity Logs from [Workday API](https://community.workday.com/sites/default/files/file-hosting/restapi/index.html#privacy/v1/get-/activityLogging) into Microsoft Sentinel.","[{""description"": ""1) In Workday, access the \""Edit Tenant Setup - Security\"" task, verify \""OAuth 2.0 Settings\"" section, make sure that the \""OAuth 2.0 Clients Enabled\"" check box is ticked. \n 2) In Workday, access the \""Edit Tenant Setup - System\"" task, verify \""User Activity Logging\"" section, make sure that the \""Enable User Activity Logging\"" check box is ticked. 
\n 3) In Workday, access the \""Register API Client\"" task.\n 4) Define the Client Name, select the \""Client Grant Type\"": \""Authorization Code Grant\"" and then select \""Access Token Type\"": \""Bearer\""\n 5) Enter the \""Redirection URI\"": https://portal.azure.com/TokenAuthorize/ExtensionName/Microsoft_Azure_Security_Insights \n 6) In section \""Scope (Functional Areas)\"", select \""System\"" and click OK at the bottom \n 7) Copy the Client ID and Client Secret before navigating away from the page, and store it securely. \n 8) In Sentinel, in the connector page - provide required Token, Authorization and User Activity Logs Endpoints, along with Client ID and Client Secret from previous step. Then click \""Connect\"". \n 9) A Workday pop up will appear to complete the OAuth2 authentication and authorization of the API client. Here you need to provide credentials for Workday account with \""System Auditing\"" permissions in Workday (can be either Workday account or Integration System User). 
\n 10) Once that's complete, the message will be displayed to authorize your API client \n"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Token Endpoint"", ""placeholder"": ""https://wd2-impl-services1.workday.com/ccx/oauth2/{tenantName}/token"", ""type"": ""text"", ""name"": ""tokenEndpoint""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""Authorization Endpoint"", ""placeholder"": ""https://impl.workday.com/{tenantName}/authorize"", ""type"": ""text"", ""name"": ""authorizationEndpoint""}}, {""type"": ""Textbox"", ""parameters"": {""label"": ""User Activity Logs Endpoint, it ends with /activityLogging "", ""placeholder"": ""https://wd2-impl-services1.workday.com/ccx/api/privacy/v1/{tenantName}/activityLogging"", ""type"": ""text"", ""name"": ""apiEndpoint""}}, {""type"": ""OAuthForm"", ""parameters"": {""clientIdLabel"": ""Client ID"", ""clientSecretLabel"": ""Client Secret"", ""connectButtonLabel"": ""Connect"", ""disconnectButtonLabel"": ""Disconnect""}}], ""title"": ""Connect to Workday to start collecting user activity logs in Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""Read and Write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""Workday User Activity API access"", ""description"": ""Access to the Workday user activity API through Oauth are required. 
The API Client needs to have the scope: System and it needs to be authorized by an account with System Auditing permissions.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Workday/Data%20Connectors/Workday_ccp/Workday_DataConnectorDefinition.json","true" +"Workplace_Facebook_CL","Workplace from Facebook","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Workplace%20from%20Facebook","azuresentinel","azure-sentinel-solution-workplacefromfacebook","2022-05-18","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","WorkplaceFacebook","Facebook","Workplace from Facebook","The [Workplace](https://www.workplace.com/) data connector provides the capability to ingest common Workplace events into Microsoft Sentinel through Webhooks. Webhooks enable custom integration apps to subscribe to events in Workplace and receive updates in real time. When a change occurs in Workplace, an HTTPS POST request with event information is sent to a callback data connector URL. Refer to [Webhooks documentation](https://developers.facebook.com/docs/workplace/reference/webhooks) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This data connector uses Azure Functions based on HTTP Trigger for waiting POST requests with logs to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Functions App.""}, {""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias WorkplaceFacebook and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Workplace%20from%20Facebook/Parsers/Workplace_Facebook.txt) on the second line of the query, enter the hostname(s) of your Workplace Facebook device(s) and any other unique identifiers for the logstream. The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Workplace**\n\n Follow the instructions to configure Webhooks.\n\n1. Log in to the Workplace with Admin user credentials.\n2. In the Admin panel, click **Integrations**.\n3. In the **All integrations** view, click **Create custom integration**\n4. Enter the name and description and click **Create**.\n5. In the **Integration details** panel show **App secret** and copy.\n6. In the **Integration permissions** panel set all read permissions. Refer to [permission page](https://developers.facebook.com/docs/workplace/reference/permissions) for details.\n7. Now proceed to STEP 2 to follow the steps (listed in Option 1 or 2) to Deploy the Azure Function.\n8. Enter the requested parameters and also enter a Token of choice. Copy this Token / Note it for the upcoming step.\n9. After the deployment of Azure Functions completes successfully, open Function App page, select your app, go to **Functions**, click **Get Function URL** and copy this / Note it for the upcoming step.\n10. Go back to Workplace from Facebook. 
In the **Configure webhooks** panel on each Tab set **Callback URL** as the same value that you copied in point 9 above and Verify token as the same\n value you copied in point 8 above which was obtained during STEP 2 of Azure Functions deployment.\n11. Click Save.""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Functions**\n\n>**IMPORTANT:** Before deploying the Workplace data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Workplace data connector using an ARM Template.\n\n1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-WorkplaceFacebook-azuredeploy) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-WorkplaceFacebook-azuredeploy-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Location**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **WorkplaceVerifyToken** (can be any expression, copy and save it for STEP 1), **WorkplaceAppSecret** and deploy. \n4. Mark the checkbox labeled **I agree to the terms and conditions stated above**. \n5. Click **Purchase** to deploy.\n6. 
After deploying open Function App page, select your app, go to the **Functions** and click **Get Function Url** copy it and follow p.7 from STEP 1.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Workplace from Facebook data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-WorkplaceFacebook-functionapp) file. Extract archive to your local development computer.\n2. Follow the [function app manual deployment instructions](https://github.com/Azure/Azure-Sentinel/blob/master/DataConnectors/AzureFunctionsManualDeployment.md#function-app-manual-deployment-instructions) to deploy the Azure Functions app using VSCode.\n3. After successful deployment of the function app, follow next steps for configuring it.""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tWorkplaceAppSecret\n\t\tWorkplaceVerifyToken\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n> - Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://<CustomerId>.ods.opinsights.azure.us`.\n4. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""Webhooks Credentials/permissions"", ""description"": ""WorkplaceAppSecret, WorkplaceVerifyToken, Callback URL are required for working Webhooks. See the documentation to learn more about [configuring Webhooks](https://developers.facebook.com/docs/workplace/reference/webhooks), [configuring permissions](https://developers.facebook.com/docs/workplace/reference/permissions). 
""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Workplace%20from%20Facebook/Data%20Connectors/WorkplaceFacebook/WorkplaceFacebook_Webhooks_FunctionApp.json","true" +"ZeroFoxAlertPoller_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxAlertsDefinition","ZeroFox Enterprise","ZeroFox Enterprise - Alerts (Polling CCF)","Collects alerts from ZeroFox API.","[{""description"": ""Connect ZeroFox to Microsoft Sentinel"", ""instructions"": [{""type"": ""Textbox"", ""parameters"": {""label"": ""Provide your ZeroFox PAT"", ""placeholder"": ""Zerofox PAT"", ""type"": ""password"", ""name"": ""apikey"", ""validations"": {""required"": true}}}, {""parameters"": {""label"": ""toggle"", ""name"": ""toggle""}, ""type"": ""ConnectionToggleButton""}], ""title"": ""Connect ZeroFox to Microsoft Sentinel""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/solutions"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true, ""write"": true, ""read"": true, ""delete"": true}}], ""customs"": [{""name"": ""ZeroFox Personal Access Token (PAT)"", ""description"": ""A ZeroFox PAT is required. 
You can get it in Data Connectors > [API Data Feeds](https://cloud.zerofox.com/data_connectors/api).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/Alerts/ZeroFoxAlerts_ConnectorDefinition.json","true" +"ZeroFox_CTI_C2_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. 
\n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZeroFox_CTI_advanced_dark_web_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. 
Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZeroFox_CTI_botnet_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull 
logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. 
Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZeroFox_CTI_breaches_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. 
\n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZeroFox_CTI_compromised_credentials_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. 
Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZeroFox_CTI_credit_cards_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API 
to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. 
Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZeroFox_CTI_dark_web_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. 
\n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZeroFox_CTI_discord_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. 
Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZeroFox_CTI_disruption_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to 
pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. 
Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZeroFox_CTI_email_addresses_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. 
\n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZeroFox_CTI_exploits_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. 
Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZeroFox_CTI_irc_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull 
logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. 
Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZeroFox_CTI_malware_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. 
\n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZeroFox_CTI_national_ids_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. 
Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZeroFox_CTI_phishing_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to 
pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. 
Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZeroFox_CTI_phone_numbers_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. 
\n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZeroFox_CTI_ransomware_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. 
Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZeroFox_CTI_telegram_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to 
pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. 
Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. 
[See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZeroFox_CTI_threat_actors_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. 
\n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZeroFox_CTI_vulnerabilities_CL","ZeroFox","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox","zerofoxinc1695922129370","zerofox-sentinel-connector","2023-07-28","","","ZeroFox","Partner","https://www.zerofox.com/contact-us/","","domains","ZeroFoxCTIDataConnector","ZeroFox","ZeroFox CTI","The ZeroFox CTI data connectors provide the capability to ingest the different [ZeroFox](https://www.zerofox.com/threat-intelligence/) cyber threat intelligence alerts into Microsoft Sentinel.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the ZeroFox CTI REST API to pull logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. Azure Key Vault provides a secure mechanism to store and retrieve key values. 
[Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""title"": """", ""description"": ""**STEP 1 - Retrieval of ZeroFox credentials:**\n\n Follow these instructions for set up logging and obtain credentials. \n1. [Log into ZeroFox's website.](https://cloud.zerofox.com/login) using your username and password \n2 - Click into the Settings button and go to the Data Connectors Section. \n3 - Select the API DATA FEEDS tab and head to the bottom of the page, select <> in the API Information box, to obtain a Personal Access Token to be used along with your username.""}, {""title"": """", ""description"": ""**STEP 2 - Deploy the Azure Function data connectors using the Azure Resource Manager template: **\n\n>**IMPORTANT:** Before deploying the ZeroFox CTI data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following), readily available."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""title"": ""Preparing resources for deployment."", ""description"": ""1. Click the **Deploy to Azure** button below. \n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-zerofox-azuredeploy)\n2. Select the preferred **Subscription**, **Resource Group**, Log analytics Workspace and **Location**. \n3. Enter the **Workspace ID**, **Workspace Key**, **ZeroFox Username**, **ZeroFox Personal Access Token**\n4.\n5. 
Click **Review + Create** to deploy.""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""ZeroFox API Credentials/permissions"", ""description"": ""**ZeroFox Username**, **ZeroFox Personal Access Token** are required for ZeroFox CTI REST API.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroFox/Data%20Connectors/CTI/ZeroFoxCTI.json","true" +"ZNSegmentAuditNativePoller_CL","ZeroNetworks","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroNetworks","zeronetworksltd1629013803351","azure-sentinel-solution-znsegmentaudit","2022-06-06","2025-09-17","","Zero Networks","Partner","https://zeronetworks.com","","domains","ZeroNetworksSegmentAuditNativePoller","Zero Networks","Zero Networks Segment Audit","The [Zero Networks Segment](https://zeronetworks.com/) Audit data connector provides the capability to ingest Zero Networks Audit events into Microsoft Sentinel through the REST API. 
This data connector uses Microsoft Sentinel native polling capability.","[{""title"": ""Connect Zero Networks to Microsoft Sentinel"", ""description"": ""Enable Zero Networks audit Logs."", ""instructions"": [{""parameters"": {""enable"": ""true""}, ""type"": ""APIKey""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)"", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Zero Networks API Token"", ""description"": ""**ZeroNetworksAPIToken** is required for REST API. 
See the API Guide and follow the instructions for obtaining credentials.""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroNetworks/Data%20Connectors/SegmentNativePollerConnector/azuredeploy_ZeroNetworks_Segment_native_poller_connector.json","true" +"","ZeroTrust(TIC3.0)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZeroTrust%28TIC3.0%29","azuresentinel","azure-sentinel-solution-zerotrust","2021-10-20","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","","","","","","","","false" +"ZimperiumMitigationLog_CL","Zimperium Mobile Threat Defense","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zimperium%20Mobile%20Threat%20Defense","zimperiuminc","zimperium_mobile_threat_defense_mss","2022-05-02","","","Zimperium","Partner","https://www.zimperium.com/support/","","domains","ZimperiumMtdAlerts","Zimperium","Zimperium Mobile Threat Defense","Zimperium Mobile Threat Defense connector gives you the ability to connect the Zimperium threat log with Microsoft Sentinel to view dashboards, create custom alerts, and improve investigation. This gives you more insight into your organization's mobile threat landscape and enhances your security operation capabilities.","[{""title"": ""Configure and connect Zimperium MTD"", ""description"": ""1. In zConsole, click **Manage** on the navigation bar.\n2. Click the **Integrations** tab.\n3. Click the **Threat Reporting** button and then the **Add Integrations** button.\n4. Create the Integration:\n - From the available integrations, select Microsoft Sentinel.\n - Enter your workspace id and primary key from the fields below, click **Next**.\n - Fill in a name for your Microsoft Sentinel integration.\n - Select a Filter Level for the threat data you wish to push to Microsoft Sentinel.\n - Click **Finish**\n5. 
For additional instructions, please refer to the [Zimperium customer support portal](https://support.zimperium.com)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zimperium%20Mobile%20Threat%20Defense/Data%20Connectors/Zimperium%20MTD%20Alerts.json","true" +"ZimperiumThreatLog_CL","Zimperium Mobile Threat Defense","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zimperium%20Mobile%20Threat%20Defense","zimperiuminc","zimperium_mobile_threat_defense_mss","2022-05-02","","","Zimperium","Partner","https://www.zimperium.com/support/","","domains","ZimperiumMtdAlerts","Zimperium","Zimperium Mobile Threat Defense","Zimperium Mobile Threat Defense connector gives you the ability to connect the Zimperium threat log with Microsoft Sentinel to view dashboards, create custom alerts, and improve investigation. 
This gives you more insight into your organization's mobile threat landscape and enhances your security operation capabilities.","[{""title"": ""Configure and connect Zimperium MTD"", ""description"": ""1. In zConsole, click **Manage** on the navigation bar.\n2. Click the **Integrations** tab.\n3. Click the **Threat Reporting** button and then the **Add Integrations** button.\n4. Create the Integration:\n - From the available integrations, select Microsoft Sentinel.\n - Enter your workspace id and primary key from the fields below, click **Next**.\n - Fill in a name for your Microsoft Sentinel integration.\n - Select a Filter Level for the threat data you wish to push to Microsoft Sentinel.\n - Click **Finish**\n5. For additional instructions, please refer to the [Zimperium customer support portal](https://support.zimperium.com)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zimperium%20Mobile%20Threat%20Defense/Data%20Connectors/Zimperium%20MTD%20Alerts.json","true" +"","Zinc Open Source","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zinc%20Open%20Source","azuresentinel","azure-sentinel-solution-zincopensource","2022-10-03","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","","","","","","","","false" +"Zoom_CL","ZoomReports","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZoomReports","azuresentinel","azure-sentinel-solution-zoomreports","2022-05-23","","","Microsoft Corporation","Microsoft","https://support.microsoft.com/","","domains","Zoom","Zoom","Zoom Reports","The [Zoom](https://zoom.us/) Reports data connector provides the capability to ingest [Zoom Reports](https://developers.zoom.us/docs/api/rest/reference/zoom-api/methods/#tag/Reports) events into Microsoft Sentinel through the REST API. Refer to [API documentation](https://developers.zoom.us/docs/api/) for more information. The connector provides ability to get events which helps to examine potential security risks, analyze your team's use of collaboration, diagnose configuration problems and more.","[{""title"": """", ""description"": "">**NOTE:** This connector uses Azure Functions to connect to the Zoom API to pull its logs into Microsoft Sentinel. This might result in additional data ingestion costs. Check the [Azure Functions pricing page](https://azure.microsoft.com/pricing/details/functions/) for details.""}, {""title"": """", ""description"": "">**(Optional Step)** Securely store workspace and API authorization key(s) or token(s) in Azure Key Vault. 
Azure Key Vault provides a secure mechanism to store and retrieve key values. [Follow these instructions](https://docs.microsoft.com/azure/app-service/app-service-key-vault-references) to use Azure Key Vault with an Azure Function App.""}, {""description"": ""**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected which is deployed as part of the solution. To view the function code in Log Analytics, open Log Analytics/Microsoft Sentinel Logs blade, click Functions and search for the alias Zoom and load the function code or click [here](https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZoomReports/Parsers/Zoom.yaml). The function usually takes 10-15 minutes to activate after solution installation/update.""}, {""title"": """", ""description"": ""**STEP 1 - Configuration steps for the Zoom API**\n\n [Follow the instructions](https://developers.zoom.us/docs/internal-apps/create/) to obtain the credentials. \n""}, {""title"": """", ""description"": ""**STEP 2 - Choose ONE from the following two deployment options to deploy the connector and the associated Azure Function**\n\n>**IMPORTANT:** Before deploying the Zoom Reports data connector, have the Workspace ID and Workspace Primary Key (can be copied from the following)."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}, {""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Option 1 - Azure Resource Manager (ARM) Template"", ""description"": ""Use this method for automated deployment of the Zoom Audit data connector using an ARM Tempate.\n\n1. Click the **Deploy to Azure** button below. 
\n\n\t[![Deploy To Azure](https://aka.ms/deploytoazurebutton)](https://aka.ms/sentinel-ZoomAPI-azuredeployV2) [![Deploy to Azure Gov](https://aka.ms/deploytoazuregovbutton)](https://aka.ms/sentinel-ZoomAPI-azuredeployV2-gov)\n2. Select the preferred **Subscription**, **Resource Group** and **Region**. \n> **NOTE:** Within the same resource group, you can't mix Windows and Linux apps in the same region. Select existing resource group without Windows apps in it or create new resource group.\n3. Enter the **AccountID**, **ClientID**, **ClientSecret**, **WorkspaceID**, **WorkspaceKey**, **Function Name** and click Review + create. \n4. Finally click **Create** to deploy.""}, {""title"": ""Option 2 - Manual Deployment of Azure Functions"", ""description"": ""Use the following step-by-step instructions to deploy the Zoom Reports data connector manually with Azure Functions (Deployment via Visual Studio Code)."", ""instructions"": [{""parameters"": {""instructionSteps"": [{""title"": ""Step 1 - Deploy a Function App"", ""description"": ""**NOTE:** You will need to [prepare VS code](https://docs.microsoft.com/azure/azure-functions/functions-create-first-function-python#prerequisites) for Azure function development.\n\n1. Download the [Azure Function App](https://aka.ms/sentinel-ZoomAPI-functionapp) file. Extract archive to your local development computer.\n2. Start VS Code. Choose File in the main menu and select Open Folder.\n3. Select the top level folder from extracted files.\n4. Choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose the **Deploy to function app** button.\nIf you aren't already signed in, choose the Azure icon in the Activity bar, then in the **Azure: Functions** area, choose **Sign in to Azure**\nIf you're already signed in, go to the next step.\n5. Provide the following information at the prompts:\n\n\ta. **Select folder:** Choose a folder from your workspace or browse to one that contains your function app.\n\n\tb. 
**Select Subscription:** Choose the subscription to use.\n\n\tc. Select **Create new Function App in Azure** (Don't choose the Advanced option)\n\n\td. **Enter a globally unique name for the function app:** Type a name that is valid in a URL path. The name you type is validated to make sure that it's unique in Azure Functions. (e.g. ZoomXXXXX).\n\n\te. **Select a runtime:** Choose Python 3.11.\n\n\tf. Select a location for new resources. For better performance and lower costs choose the same [region](https://azure.microsoft.com/regions/) where Microsoft Sentinel is located.\n\n6. Deployment will begin. A notification is displayed after your function app is created and the deployment package is applied.\n7. Go to Azure Portal for the Function App configuration""}, {""title"": ""Step 2 - Configure the Function App"", ""description"": ""1. In the Function App, select the Function App Name and select **Configuration**.\n2. In the **Application settings** tab, select ** New application setting**.\n3. Add each of the following application settings individually, with their respective string values (case-sensitive): \n\t\tAccountID\n\t\tClientID\n\t\tClientSecret\n\t\tWorkspaceID\n\t\tWorkspaceKey\n\t\tlogAnalyticsUri (optional)\n Use logAnalyticsUri to override the log analytics API endpoint for dedicated cloud. For example, for public cloud, leave the value empty; for Azure GovUS cloud environment, specify the value in the following format: `https://.ods.opinsights.azure.us`.\n4. 
Once all application settings have been entered, click **Save**.""}]}, ""type"": ""InstructionStepsGroup""}]}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions on the workspace are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""Microsoft.Web/sites permissions"", ""description"": ""Read and write permissions to Azure Functions to create a Function App is required. [See the documentation to learn more about Azure Functions](https://docs.microsoft.com/azure/azure-functions/).""}, {""name"": ""REST API Credentials/permissions"", ""description"": ""**AccountID**, **ClientID** and **ClientSecret** are required for Zoom API. [See the documentation to learn more about Zoom API](https://developers.zoom.us/docs/internal-apps/create/). 
[Follow the instructions for Zoom API configurations](https://aka.ms/sentinel-zoomreports-readme).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/ZoomReports/Data%20Connectors/ZoomReports_API_FunctionApp.json","true" +"","Zscaler Internet Access","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zscaler%20Internet%20Access","zscaler1579058425289","zscaler_internet_access_mss","2022-05-25","","","Zscaler","Partner","https://help.zscaler.com/submit-ticket-links","","domains","","","","","","","","false" +"ZPA_CL","Zscaler Private Access (ZPA)","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zscaler%20Private%20Access%20%28ZPA%29","azuresentinel","azure-sentinel-solution-zscalerprivateaccess","2022-01-31","","","Microsoft Corporation","Microsoft","https://support.microsoft.com","","domains","ZscalerPrivateAccess","Zscaler","[Deprecated] Zscaler Private Access","The [Zscaler Private Access (ZPA)](https://help.zscaler.com/zpa/what-zscaler-private-access) data connector provides the capability to ingest [Zscaler Private Access events](https://help.zscaler.com/zpa/log-streaming-service) into Microsoft Sentinel. Refer to [Zscaler Private Access documentation](https://help.zscaler.com/zpa) for more information.","[{""title"": """", ""description"": "">**NOTE:** This data connector depends on a parser based on a Kusto Function to work as expected. [Follow these steps](https://aka.ms/sentinel-ZscalerPrivateAccess-parser) to create the Kusto Functions alias, **ZPAEvent**"", ""instructions"": []}, {""title"": """", ""description"": "">**NOTE:** This data connector has been developed using Zscaler Private Access version: 21.67.1"", ""instructions"": []}, {""title"": ""1. 
Install and onboard the agent for Linux or Windows"", ""description"": ""Install the agent on the Server where the Zscaler Private Access logs are forwarded.\n\n> Logs from Zscaler Private Access Server deployed on Linux or Windows servers are collected by **Linux** or **Windows** agents."", ""instructions"": [{""parameters"": {""title"": ""Choose where to install the Linux agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Linux Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Linux Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnLinuxNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""instructions"": [{""parameters"": {""title"": ""Choose where to install the Windows agent:"", ""instructionSteps"": [{""title"": ""Install agent on Azure Windows Virtual Machine"", ""description"": ""Select the machine to install the agent on and then click **Connect**."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnVirtualMachine""}, ""type"": ""InstallAgent""}]}, {""title"": ""Install agent on a non-Azure Windows Machine"", ""description"": ""Download the agent on the relevant machine and follow the instructions."", ""instructions"": [{""parameters"": {""linkType"": ""InstallAgentOnNonAzure""}, ""type"": ""InstallAgent""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Configure the logs to be collected"", ""description"": ""Follow the configuration steps below to get Zscaler Private Access logs into Microsoft Sentinel. 
Refer to the [Azure Monitor Documentation](https://docs.microsoft.com/azure/azure-monitor/agents/data-sources-json) for more details on these steps.\nZscaler Private Access logs are delivered via Log Streaming Service (LSS). Refer to [LSS documentation](https://help.zscaler.com/zpa/about-log-streaming-service) for detailed information\n1. Configure [Log Receivers](https://help.zscaler.com/zpa/configuring-log-receiver). While configuring a Log Receiver, choose **JSON** as **Log Template**.\n2. Download config file [zpa.conf](https://aka.ms/sentinel-ZscalerPrivateAccess-conf) \n\t\twget -v https://aka.ms/sentinel-zscalerprivateaccess-conf -O zpa.conf\n3. Login to the server where you have installed Azure Log Analytics agent.\n4. Copy zpa.conf to the /etc/opt/microsoft/omsagent/**workspace_id**/conf/omsagent.d/ folder.\n5. Edit zpa.conf as follows:\n\n\t a. specify port which you have set your Zscaler Log Receivers to forward logs to (line 4)\n\n\t b. zpa.conf uses the port **22033** by default. Ensure this port is not being used by any other source on your server\n\n\t c. If you would like to change the default port for **zpa.conf** make sure that it does not conflict with default AMA agent ports I.e.(For example CEF uses TCP port **25226** or **25224**) \n\n\t d. replace **workspace_id** with real value of your Workspace ID (lines 14,15,16,19)\n6. 
Save changes and restart the Azure Log Analytics agent for Linux service with the following command:\n\t\tsudo /opt/microsoft/omsagent/bin/service_control restart"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/Zscaler%20Private%20Access%20%28ZPA%29/Data%20Connectors/Connector_LogAnalytics_agent_Zscaler_ZPA.json","true" +"NCProtectUAL_CL","archTIS","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/archTIS","nucleuscyber","nc-protect-azure-sentinel-data-connector","2021-10-20","","","archTIS","Partner","https://www.archtis.com/nc-protect-support/","","domains","NucleusCyberNCProtect","archTIS","NC Protect","[NC Protect Data Connector (archtis.com)](https://info.archtis.com/get-started-with-nc-protect-sentinel-data-connector) provides the capability to ingest user activity logs and events into Microsoft Sentinel. The connector provides visibility into NC Protect user activity logs and events in Microsoft Sentinel to improve monitoring and investigation capabilities","[{""title"": """", ""description"": ""1. Install NC Protect into your Azure Tenancy\n2. 
Log into the NC Protect Administration site\n3. From the left hand navigation menu, select General -> User Activity Monitoring\n4. Tick the checkbox to Enable SIEM and click the Configure button\n5. Select Microsoft Sentinel as the Application and complete the configuration using the information below\n6. Click Save to activate the connection\n"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Workspace ID""}, ""type"": ""CopyableLabel""}, {""parameters"": {""fillWith"": [""PrimaryKey""], ""label"": ""Primary Key""}, ""type"": ""CopyableLabel""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""write"": true, ""read"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""name"": ""NC Protect"", ""description"": ""You must have a running instance of NC Protect for O365. 
Please [contact us](https://www.archtis.com/data-discovery-classification-protection-software-secure-collaboration/).""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/archTIS/Data%20Connectors/NucleusCyberNCProtect.json","true" +"CommonSecurityLog","iboss","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/iboss","iboss","iboss-sentinel-connector","2022-02-15","","","iboss","Partner","https://www.iboss.com/contact-us/","","domains","iboss","iboss","[Deprecated] iboss via Legacy Agent","The [iboss](https://www.iboss.com) data connector enables you to seamlessly connect your Threat Console to Microsoft Sentinel and enrich your instance with iboss URL event logs. Our logs are forwarded in Common Event Format (CEF) over Syslog and the configuration required can be completed on the iboss platform without the use of a proxy. Take advantage of our connector to garner critical data points and gain insight into security threats.","[{""title"": ""1. Configure a dedicated proxy Linux machine"", ""description"": ""If using the iboss gov environment or there is a preference to forward the logs to a dedicated proxy Linux machine, proceed with this step. 
In all other cases, please advance to step two."", ""innerSteps"": [{""title"": ""1.1 Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace""}, {""title"": ""1.2 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the dedicated proxy Linux machine between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.3 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n> 2. You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Forward Common Event Format (CEF) logs"", ""description"": ""Set your Threat Console to send Syslog messages in CEF format to your Azure workspace. Make note of your Workspace ID and Primary Key within your Log Analytics Workspace (Select the workspace from the Log Analytics workspaces menu in the Azure portal. Then select Agents management in the Settings section). \n\n>1. 
Navigate to Reporting & Analytics inside your iboss Console\n\n>2. Select Log Forwarding -> Forward From Reporter\n\n>3. Select Actions -> Add Service\n\n>4. Toggle to Microsoft Sentinel as a Service Type and input your Workspace ID/Primary Key along with other criteria. If a dedicated proxy Linux machine has been configured, toggle to Syslog as a Service Type and configure the settings to point to your dedicated proxy Linux machine\n\n>5. Wait one to two minutes for the setup to complete\n\n>6. Select your Microsoft Sentinel Service and verify the Microsoft Sentinel Setup Status is Successful. If a dedicated proxy Linux machine has been configured, you may proceed with validating your connection""}, {""title"": ""3. Validate connection"", ""description"": ""Open Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace""}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy (Only applicable if a dedicated proxy Linux machine has been configured).\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/iboss/Data%20Connectors/iboss_cef.json","true" +"CommonSecurityLog","iboss","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/iboss","iboss","iboss-sentinel-connector","2022-02-15","","","iboss","Partner","https://www.iboss.com/contact-us/","","domains","ibossAma","iboss","iboss via AMA","The [iboss](https://www.iboss.com) data connector enables you to seamlessly connect your Threat Console to Microsoft Sentinel and enrich your instance with iboss URL event logs. Our logs are forwarded in Common Event Format (CEF) over Syslog and the configuration required can be completed on the iboss platform without the use of a proxy. Take advantage of our connector to garner critical data points and gain insight into security threats.","[{""title"": ""Configure AMA Data Connector"", ""description"": ""Steps to configure the iboss AMA Data Connector"", ""instructions"": [{""parameters"": {""title"": ""Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Gather Required Configuration Details in Azure Arc"", ""description"": ""1. Navigate to Azure Arc ---> Azure Arc Resources ---> Machines.\n\n2. Add a machine ---> Add a single server ---> Generate script.\n\n3. Select the resource group, this should be the same group as the Log Analytics Workspace for your Microsoft Sentinel instance you will be using\n\n4. Select a region and ensure it is in the same region as your Log Analytics Workspace\n\n5. Select Linux as Operating System\n\n6. Click Next\n\n7. Download the script and use this information for the next step when configuring your Microsoft Sentinel AMA integration iboss side.\n\n8. 
Navigate to the Log Analytics Workspace of your Microsoft Sentinel instance and find its resource group, workspace name, and workspace id""}, {""title"": ""Step B. Forward Common Event Format (CEF) logs"", ""description"": ""Set your Threat Console to send Syslog messages in CEF format to your Azure workspace. (Ensure you have the information gathered from the previous section)\n\n>1. Navigate to the Integrations Marketplace inside your iboss Console\n\n>2. Select Microsoft Sentinel AMA Log Forwarding\n\n>3. Select Add Integration\n\n4. Use the information from the script and your log analytics workspace to configure the integration.\n\n5. Add the integration\n\n>6. An email will be sent to your iboss alerts email to authenticate. Please do so within five minutes\n\n7. After authenticating, wait 15 to 20 minutes and ensure the Microsoft Sentinel Status of your integration is successful.""}, {""title"": ""Step C. Validate connection"", ""description"": ""1. Follow the instructions to validate your connectivity:\n\n2. Open Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n3. It may take about 20 minutes until the connection streams data to your workspace.""}]}, ""type"": ""InstructionStepsGroup""}]}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/iboss/Data%20Connectors/template_ibossAMA.json","true" +"CommonSecurityLog","vArmour Application Controller","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/vArmour%20Application%20Controller","varmournetworks","varmour_sentinel","2022-06-01","","","vArmour Networks","Partner","https://www.varmour.com/contact-us/","","domains","vArmourAC","vArmour","[Deprecated] vArmour Application Controller via Legacy Agent","vArmour reduces operational risk and increases cyber resiliency by visualizing and controlling application relationships across the enterprise. This vArmour connector enables streaming of Application Controller Violation Alerts into Microsoft Sentinel, so you can take advantage of search & correlation, alerting, & threat intelligence enrichment for each log.","[{""title"": ""1. Linux Syslog agent configuration"", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""innerSteps"": [{""title"": ""1.1 Select or create a Linux machine"", ""description"": ""Select or create a Linux machine that Microsoft Sentinel will use as the proxy between your security solution and Microsoft Sentinel this machine can be on your on-prem environment, Azure or other clouds.""}, {""title"": ""1.2 Install the CEF collector on the Linux machine"", ""description"": ""Install the Microsoft Monitoring Agent on your Linux machine and configure the machine to listen on the necessary port and forward messages to your Microsoft Sentinel workspace. 
The CEF collector collects CEF messages on port 514 TCP.\n\n> 1. Make sure that you have Python on your machine using the following command: python -version.\n\n> 2. You must have elevated permissions (sudo) on your machine."", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId"", ""PrimaryKey""], ""label"": ""Run the following command to install and apply the CEF collector:"", ""value"": ""sudo wget -O cef_installer.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_installer.py&&sudo python cef_installer.py {0} {1}""}, ""type"": ""CopyableLabel""}]}]}, {""title"": ""2. Configure the vArmour Application Controller to forward Common Event Format (CEF) logs to the Syslog agent"", ""description"": ""Send Syslog messages in CEF format to the proxy machine. Make sure you to send the logs to port 514 TCP on the machine's IP address."", ""innerSteps"": [{""title"": ""2.1 Download the vArmour Application Controller user guide"", ""description"": ""Download the user guide from https://support.varmour.com/hc/en-us/articles/360057444831-vArmour-Application-Controller-6-0-User-Guide.""}, {""title"": ""2.2 Configure the Application Controller to Send Policy Violations"", ""description"": ""In the user guide - refer to \""Configuring Syslog for Monitoring and Violations\"" and follow steps 1 to 3.""}]}, {""title"": ""3. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\n>It may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n> 1. Make sure that you have Python on your machine using the following command: python -version\n\n>2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""fillWith"": [""WorkspaceId""], ""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O cef_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/CEF/cef_troubleshoot.py&&sudo python cef_troubleshoot.py {0}""}, ""type"": ""CopyableLabel""}]}, {""title"": ""4. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. 
[See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/vArmour%20Application%20Controller/Data%20Connectors/Connector_vArmour_AppController_CEF.json","true" +"CommonSecurityLog","vArmour Application Controller","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/vArmour%20Application%20Controller","varmournetworks","varmour_sentinel","2022-06-01","","","vArmour Networks","Partner","https://www.varmour.com/contact-us/","","domains","vArmourACAma","vArmour","[Deprecated] vArmour Application Controller via AMA","vArmour reduces operational risk and increases cyber resiliency by visualizing and controlling application relationships across the enterprise. This vArmour connector enables streaming of Application Controller Violation Alerts into Microsoft Sentinel, so you can take advantage of search & correlation, alerting, & threat intelligence enrichment for each log.","[{""title"": """", ""description"": ""Install and configure the Linux agent to collect your Common Event Format (CEF) Syslog messages and forward them to Microsoft Sentinel.\n\n> Notice that the data from all regions will be stored in the selected workspace"", ""instructions"": [{""parameters"": {""title"": ""1. Kindly follow the steps to configure the data connector"", ""instructionSteps"": [{""title"": ""Step A. Configure the Common Event Format (CEF) via AMA data connector"", ""description"": ""_Note:- CEF logs are collected only from Linux Agents_\n\n1. Navigate to Microsoft Sentinel workspace ---> configuration ---> Data connector blade .\n\n2. Search for 'Common Event Format (CEF) via AMA' data connector and open it.\n\n3. 
Check if there is no existing DCR configured to collect required facility of logs, Create a new DCR (Data Collection Rule)\n\n\t_Note:- It is recommended to install minimum 1.27 version of AMA agent [Learn more](https://learn.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-manage?tabs=azure-portal ) and ensure there is no duplicate DCR as it can cause log duplication_\n\n4. Run the command provided in the CEF via AMA data connector page to configure the CEF collector on the machine""}, {""title"": ""Step B. Configure the vArmour Application Controller to forward Common Event Format (CEF) logs to the Syslog agent"", ""description"": ""Send Syslog messages in CEF format to the proxy machine. Make sure to send the logs to port 514 TCP on the machine's IP address."", ""innerSteps"": [{""title"": ""1 Download the vArmour Application Controller user guide"", ""description"": ""Download the user guide from https://support.varmour.com/hc/en-us/articles/360057444831-vArmour-Application-Controller-6-0-User-Guide.""}, {""title"": ""2 Configure the Application Controller to Send Policy Violations"", ""description"": ""In the user guide - refer to \""Configuring Syslog for Monitoring and Violations\"" and follow steps 1 to 3.""}]}, {""title"": ""Step C. Validate connection"", ""description"": ""Follow the instructions to validate your connectivity:\n\nOpen Log Analytics to check if the logs are received using the CommonSecurityLog schema.\n\nIt may take about 20 minutes until the connection streams data to your workspace.\n\nIf the logs are not received, run the following connectivity validation script:\n\n 1. Make sure that you have Python on your machine using the following command: python -version\n\n2. 
You must have elevated permissions (sudo) on your machine"", ""instructions"": [{""parameters"": {""label"": ""Run the following command to validate your connectivity:"", ""value"": ""sudo wget -O Sentinel_AMA_troubleshoot.py https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/DataConnectors/Syslog/Sentinel_AMA_troubleshoot.py&&sudo python Sentinel_AMA_troubleshoot.py --cef""}, ""type"": ""CopyableLabel""}]}]}, ""type"": ""InstructionStepsGroup""}]}, {""title"": ""2. Secure your machine "", ""description"": ""Make sure to configure the machine's security according to your organization's security policy\n\n\n[Learn more >](https://aka.ms/SecureCEF)""}]","{""resourceProvider"": [{""provider"": ""Microsoft.OperationalInsights/workspaces"", ""permissionsDisplayText"": ""read and write permissions are required."", ""providerDisplayName"": ""Workspace"", ""scope"": ""Workspace"", ""requiredPermissions"": {""read"": true, ""write"": true, ""delete"": true}}, {""provider"": ""Microsoft.OperationalInsights/workspaces/sharedKeys"", ""permissionsDisplayText"": ""read permissions to shared keys for the workspace are required. [See the documentation to learn more about workspace keys](https://docs.microsoft.com/azure/azure-monitor/platform/agent-windows#obtain-workspace-id-and-key)."", ""providerDisplayName"": ""Keys"", ""scope"": ""Workspace"", ""requiredPermissions"": {""action"": true}}], ""customs"": [{""description"": ""To collect data from non-Azure VMs, they must have Azure Arc installed and enabled. 
[Learn more](https://docs.microsoft.com/azure/azure-monitor/agents/azure-monitor-agent-install?tabs=ARMAgentPowerShell,PowerShellWindows,PowerShellWindowsArc,CLIWindows,CLIWindowsArc)""}, {""description"": ""Common Event Format (CEF) via AMA and Syslog via AMA data connectors must be installed [Learn more](https://learn.microsoft.com/azure/sentinel/connect-cef-ama#open-the-connector-page-and-create-the-dcr)""}]}","https://github.com/Azure/Azure-Sentinel/blob/master/Solutions/vArmour%20Application%20Controller/Data%20Connectors/template_vArmour_AppControllerAMA.json","true"